prompt (large_string, lengths 72–9.34k) | completion (large_string, lengths 0–7.61k) |
---|---|
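Each row below pairs a fill-in-the-middle (FIM) prompt with its completion: the prompt carries a `<|file_name|>` header, a `<|fim▁begin|>`/`<|fim▁end|>` wrapper, and a hole marker (`<|fim▁hole|>` or `<|fim_middle|>`; both spellings occur in the rows), and the completion is the text that belongs at the hole. A minimal splicing sketch, assuming the columns arrive as plain strings (the helper name is illustrative, not part of the dataset):

```python
def splice_fim_row(prompt: str, completion: str) -> str:
    """Rebuild the original source file from one prompt/completion row."""
    # Strip the <|file_name|>...<|end_file_name|> header and the FIM sentinels.
    body = prompt.split("<|fim▁begin|>", 1)[1]
    body = body.rsplit("<|fim▁end|>", 1)[0]
    # Rows use either spelling for the gap marker.
    for marker in ("<|fim▁hole|>", "<|fim_middle|>"):
        if marker in body:
            prefix, suffix = body.split(marker, 1)
            return prefix + completion + suffix
    return body  # no marker found: the row has no hole to fill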
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
<|fim_middle|>
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation
<|fim▁end|> | """
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
<|fim_middle|>
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation
<|fim▁end|> | model = ProductPresentation |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation
class ModeOfAdministrationSerializer(BaseModelSerializer):
<|fim_middle|>
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation
<|fim▁end|> | """
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
<|fim_middle|>
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation
<|fim▁end|> | model = ModeOfAdministration |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration
class ProductItemSerializer(BaseModelSerializer):
<|fim_middle|>
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation
<|fim▁end|> | """
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
<|fim_middle|>
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation
<|fim▁end|> | model = ProductItem |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem
class ProductFormulationSerializer(BaseModelSerializer):
<|fim_middle|>
<|fim▁end|> | """
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
model = ProductFormulation |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
core/api/serializers.py is the module for core model api data serializers
"""
#import core django module
from django.contrib.auth.models import User, Permission
#import external modules
from rest_framework import serializers
#import project modules
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
"""
REST API serializer for User model
"""
class Meta:
model = User
class BaseModelSerializer(serializers.ModelSerializer):
"""
Base Model Serializer for models
"""
created_by = UserSerializer(required=False, read_only=True)
modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for ProductCategory model
"""
class Meta:
model = ProductCategory
class ProductSerializer(BaseModelSerializer):
"""
REST API Serializer for Product models
"""
class Meta:
model = Product
class UOMCategorySerializer(BaseModelSerializer):
"""
REST API Serializer for UOMCategory model
"""
class Meta:
model = UOMCategory
class UnitOfMeasurementSerializer(BaseModelSerializer):
"""
REST API Serializer for UnitOfMeasurement model
"""
class Meta:
model = UnitOfMeasurement
class CompanyCategorySerializer(BaseModelSerializer):
"""
REST API serializer for CompanyCategory model
"""
class Meta:
model = CompanyCategory
class CompanySerializer(BaseModelSerializer):
"""
REST API serializer for Company model
"""
class Meta:
model = Company
class CurrencySerializer(BaseModelSerializer):
"""
REST API serializer for Currency model
"""
class Meta:
model = Currency
fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
"""
REST API serializer for Rate model
"""
class Meta:
model = Rate
class ContactSerializer(BaseModelSerializer):
"""
REST API serializer for Contact model
"""
class Meta:
model = Contact
class AddressSerializer(BaseModelSerializer):
"""
REST API serializer for Address model
"""
class Meta:
model = Address
class EmployeeCategorySerializer(BaseModelSerializer):
"""
REST API serializer for EmployeeCategory
"""
class Meta:
model = EmployeeCategory
class EmployeeSerializer(BaseModelSerializer):
"""
REST API serializer for Employee
"""
class Meta:
model = Employee
class PermissionSerializer(BaseModelSerializer):
"""
REST API serializer for Permission model
"""
class Meta:
model = Permission
class ProductPresentationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductPresentation model
"""
class Meta:
model = ProductPresentation
class ModeOfAdministrationSerializer(BaseModelSerializer):
"""
REST API serializer for ModeOfAdministration model
"""
class Meta:
model = ModeOfAdministration
class ProductItemSerializer(BaseModelSerializer):
"""
REST API serializer for ProductItem model
"""
class Meta:
model = ProductItem
class ProductFormulationSerializer(BaseModelSerializer):
"""
REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
"""
class Meta:
<|fim_middle|>
<|fim▁end|> | model = ProductFormulation |
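A note on the `serializers.py` rows above: apart from `CurrencySerializer`, every `Meta` declares only `model`. That was valid in early Django REST Framework, but DRF 3.3+ requires an explicit `fields` or `exclude` on every `ModelSerializer`, so this corpus targets an older DRF. A minimal sketch of one serializer updated for a modern release (the dataset rows themselves are left untouched):

```python
# Sketch only: not part of the dataset rows above.
from rest_framework import serializers
from core.models import ProductCategory

class ProductCategorySerializer(serializers.ModelSerializer):
    class Meta:
        model = ProductCategory
        fields = '__all__'  # DRF 3.3+ demands `fields` or `exclude`
```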
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):<|fim▁hole|> # apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
# testimony requires both importance and component, this will blow up if its forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)<|fim▁end|> | # Unit test, no testimony markers
continue
|
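The `testimony_markers.py` rows turn Testimony docstring tokens into pytest marks. For reference, a docstring shape that all three regexes in the rows would match (the token values are the examples from the rows' own comments; illustrative only):

```python
def test_sync_repository():
    """Verify that a repository sync completes.

    :CaseComponent: Repository
    :CaseImportance: Critical
    :Assignee: jsmith
    """
```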
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
<|fim_middle|>
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
# testimony requires both importance and component, this will blow up if its forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | """Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
) |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
<|fim_middle|>
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
# testimony requires both importance and component, this will blow up if its forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | """Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker) |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
<|fim_middle|>
<|fim▁end|> | """Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
# testimony requires both importance and component, this will blow up if its forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected) |
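As the comments in the row above note, the three filters combine: a test is kept only if it matches every option passed. A hedged usage sketch, driving the same filters programmatically through `pytest.main` (option values are the examples from the comments, not real project data):

```python
import pytest

# Collects only tests carrying all three matching marks.
pytest.main([
    "--importance", "Critical",
    "--component", "Repository",
    "--assignee", "jsmith",
])
```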
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
<|fim_middle|>
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
# testimony requires both importance and component, this will blow up if its forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | continue |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
<|fim_middle|>
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
# testimony requires both importance and component, this will blow up if its forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | item.add_marker(pytest.mark.component(doc_component[0])) |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
<|fim_middle|>
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | item.add_marker(pytest.mark.importance(doc_importance[0])) |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
<|fim_middle|>
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | item.add_marker(pytest.mark.assignee(doc_assignee[0])) |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
<|fim_middle|>
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item) |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
<|fim_middle|>
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
<|fim_middle|>
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
<|fim_middle|>
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def <|fim_middle|>(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | pytest_addoption |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def <|fim_middle|>(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | pytest_configure |
<|file_name|>testimony_markers.py<|end_file_name|><|fim▁begin|>import inspect
import re
import pytest
from robottelo.logging import collection_logger as logger
IMPORTANCE_LEVELS = []
def pytest_addoption(parser):
"""Add CLI options related to Testimony token based mark collection"""
parser.addoption(
'--importance',
help='Comma separated list of importance levels to include in test collection',
)
parser.addoption(
'--component',
help='Comma separated list of component names to include in test collection',
)
parser.addoption(
'--assignee',
help='Comma separated list of assignees to include in test collection',
)
def pytest_configure(config):
"""Register markers related to testimony tokens"""
for marker in [
'importance: CaseImportance testimony token, use --importance to filter',
'component: Component testimony token, use --component to filter',
'assignee: Assignee testimony token, use --assignee to filter',
]:
config.addinivalue_line("markers", marker)
component_regex = re.compile(
# To match :CaseComponent: FooBar
r'\s*:CaseComponent:\s*(?P<component>\S*)',
re.IGNORECASE,
)
importance_regex = re.compile(
# To match :CaseImportance: Critical
r'\s*:CaseImportance:\s*(?P<importance>\S*)',
re.IGNORECASE,
)
assignee_regex = re.compile(
# To match :Assignee: jsmith
r'\s*:Assignee:\s*(?P<assignee>\S*)',
re.IGNORECASE,
)
@pytest.hookimpl(tryfirst=True)
def <|fim_middle|>(session, items, config):
"""Add markers for testimony tokens"""
# split the option string and handle no option, single option, multiple
# config.getoption(default) doesn't work like you think it does, hence or ''
importance = [i for i in (config.getoption('importance') or '').split(',') if i != '']
component = [c for c in (config.getoption('component') or '').split(',') if c != '']
assignee = [a for a in (config.getoption('assignee') or '').split(',') if a != '']
selected = []
deselected = []
logger.info('Processing test items to add testimony token markers')
for item in items:
if item.nodeid.startswith('tests/robottelo/'):
# Unit test, no testimony markers
continue
# apply the marks for importance, component, and assignee
# Find matches from docstrings starting at smallest scope
item_docstrings = [
d
for d in map(inspect.getdoc, (item.function, getattr(item, 'cls', None), item.module))
if d is not None
]
item_mark_names = [m.name for m in item.iter_markers()]
for docstring in item_docstrings:
# Add marker starting at smallest docstring scope
# only add the mark if it hasn't already been applied at a lower scope
doc_component = component_regex.findall(docstring)
if doc_component and 'component' not in item_mark_names:
item.add_marker(pytest.mark.component(doc_component[0]))
doc_importance = importance_regex.findall(docstring)
if doc_importance and 'importance' not in item_mark_names:
item.add_marker(pytest.mark.importance(doc_importance[0]))
doc_assignee = assignee_regex.findall(docstring)
if doc_assignee and 'assignee' not in item_mark_names:
item.add_marker(pytest.mark.assignee(doc_assignee[0]))
# exit early if no filters were passed
if importance or component or assignee:
# Filter test collection based on CLI options for filtering
# filters should be applied together
# such that --component Repository --importance Critical --assignee jsmith
# only collects tests which have all three of these marks
# https://github.com/pytest-dev/pytest/issues/1373 Will make this way easier
            # testimony requires both importance and component; this will blow up if it's forgotten
importance_marker = item.get_closest_marker('importance').args[0]
if importance and importance_marker not in importance:
logger.debug(
f'Deselected test {item.nodeid} due to "--importance {importance}",'
f'test has importance mark: {importance_marker}'
)
deselected.append(item)
continue
component_marker = item.get_closest_marker('component').args[0]
if component and component_marker not in component:
logger.debug(
f'Deselected test {item.nodeid} due to "--component {component}",'
f'test has component mark: {component_marker}'
)
deselected.append(item)
continue
assignee_marker = item.get_closest_marker('assignee').args[0]
if assignee and assignee_marker not in assignee:
logger.debug(
f'Deselected test {item.nodeid} due to "--assignee {assignee}",'
f'test has assignee mark: {assignee_marker}'
)
deselected.append(item)
continue
selected.append(item)
# selected will be empty if no filter option was passed, defaulting to full items list
items[:] = selected if deselected else items
config.hook.pytest_deselected(items=deselected)
<|fim▁end|> | pytest_collection_modifyitems |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
		if self.__sub_ids is None:
			self.__sub_ids = range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
		except Exception:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
<|fim▁hole|> return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
		if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
			self.__logger.debug(result)<|fim▁end|> | if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.') |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
<|fim_middle|>
<|fim▁end|> | def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
		if self.__sub_ids is None:
			self.__sub_ids = range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
		except Exception:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
		if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
		if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
<|fim_middle|>
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
		except Exception:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
		if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
		if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
		if self.__sub_ids is None:
			self.__sub_ids = range(len(self.__task.data['jobvar']))
self.__initialize_manager() |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
		if self.__sub_ids is None:
			self.__sub_ids = range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
<|fim_middle|>
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
		except Exception:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
		if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
		if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
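
# For reference, a minimal sketch of what jsub.util.safe_mkdir and
# jsub.util.safe_rmdir presumably provide: idempotent create/remove helpers.
# This is an assumption for illustration only -- the real implementations
# live in the jsub package.
import os
import shutil

def safe_mkdir_sketch(path):
    # Create the directory (and any parents) unless it already exists.
    if not os.path.exists(path):
        os.makedirs(path)

def safe_rmdir_sketch(path):
    # Remove the directory tree if present; a missing path is not an error.
    if os.path.exists(path):
        shutil.rmtree(path)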
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
<|fim_middle|>
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
<|fim_middle|>
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
<|fim_middle|>
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
<|fim_middle|>
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
<|fim_middle|>
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
<|fim_middle|>
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
<|fim_middle|>
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
<|fim_middle|>
<|fim▁end|> | if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
<|fim_middle|>
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | self.__sub_ids=range(len(self.__task.data['jobvar'])) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
<|fim_middle|>
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | return |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
<|fim_middle|>
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
<|fim_middle|>
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return |
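The file these rows repeatedly mask leans on two helpers imported from jsub.util whose bodies never appear here. A minimal sketch of what they plausibly do (the names and call sites are real; the bodies are assumptions):

import os
import shutil

def safe_mkdir(path):
    # Create the directory (and any parents) if missing; do nothing if it exists.
    if not os.path.exists(path):
        os.makedirs(path)

def safe_rmdir(path):
    # Remove the directory tree if present; ignore a missing path.
    if os.path.exists(path):
        shutil.rmtree(path)

handle() depends on exactly this pairing: safe_rmdir followed by safe_mkdir yields a clean main/ directory on every run.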
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
<|fim_middle|>
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids) |
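Each row pairs a prompt, whose hole is marked <|fim_middle|>, with the completion that fills the hole (the text between the closing <|fim▁end|> | and the trailing |). A sketch of one way to split a row back into prefix/middle/suffix form; the function name is hypothetical, and the exact token layout a downstream model expects is an assumption:

def split_fim_row(prompt, completion):
    # prompt: <|file_name|>...<|end_file_name|><|fim▁begin|> PREFIX <|fim_middle|> SUFFIX <|fim▁end|>
    # completion: the text that belongs in the hole.
    body = prompt.split('<|fim▁begin|>', 1)[1].rsplit('<|fim▁end|>', 1)[0]
    prefix, suffix = body.split('<|fim_middle|>', 1)
    return prefix, completion, suffix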
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
<|fim_middle|>
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id) |
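For orientation, the Submit class that these rows keep masking has two public entry points, the constructor and handle(). The driver below is hypothetical: only the surface of the manager API (load_task and the load_*_manager factories) is visible in the rows, so the import path and task id are stand-ins:

import logging

logging.basicConfig(level=logging.INFO)

# from jsub.manager import Manager    # assumed import path, not shown in these rows
# manager = Manager()
# Submit(manager, task_id=1, dry_run=True).handle()   # builds run_root/main, submits nothing
# Submit(manager, task_id=1, resubmit=True).handle()  # deletes stale backend jobs, then resubmits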
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
<|fim_middle|>
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
<|fim_middle|>
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | result = {} |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
<|fim_middle|>
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | njobs = len(result['backend_job_ids']) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
<|fim_middle|>
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | njobs = len(result) |
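The three rows above all target the result-normalisation block at the end of __submit. The contract it implies: a backend may return a dict carrying backend_job_ids (and optionally backend_task_id), or anything else, which is coerced to an empty dict and hence to zero jobs. A condensed restatement for reference (the helper name is made up):

def count_jobs(result):
    # Mirrors __submit: non-dict results are coerced to {}, so even a bare
    # list of job ids would be counted as 0 after normalisation.
    if not isinstance(result, dict):
        result = {}
    return len(result.get('backend_job_ids', result))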
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
<|fim_middle|>
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | self.__logger.info('%d jobs successfully submitted to backend.'%(njobs)) |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def <|fim_middle|>(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __init__ |
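From this row onward the hole shrinks to a single identifier (a method name) rather than a statement block. A sketch of how such name-only holes could be cut from a source file with the ast module; this recipe is an assumption, not necessarily the generator behind these rows:

import ast

def method_name_holes(source):
    # Yield one (prefix, name, suffix) triple per function definition,
    # masking only the identifier between 'def ' and the parameter list.
    lines = source.splitlines(keepends=True)
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.FunctionDef):
            offset = sum(len(l) for l in lines[:node.lineno - 1]) + node.col_offset
            start = source.index(node.name, offset)
            yield source[:start], node.name, source[start + len(node.name):]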
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def <|fim_middle|>(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __initialize_manager |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def <|fim_middle|>(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | handle |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def <|fim_middle|>(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __create_input |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # content fetch failed; fall back to an empty input directory
safe_mkdir(input_dir)
def <|fim_middle|>(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __create_context |
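Each record here pairs a prompt (a source file with one span cut out and replaced by a <|fim_middle|> or <|fim▁hole|> sentinel) with the completion that fills the span. A minimal reassembly sketch, assuming only the sentinel conventions visible in these records (the helper itself is illustrative, not part of the dataset):

# Illustrative helper: rebuild a source file from one prompt/completion pair.
def rebuild(prompt: str, completion: str) -> str:
    # Drop the <|file_name|>...<|end_file_name|> header and the end marker.
    body = prompt.split("<|fim▁begin|>", 1)[-1].replace("<|fim▁end|>", "")
    for sentinel in ("<|fim_middle|>", "<|fim▁hole|>"):
        if sentinel in body:
            prefix, suffix = body.split(sentinel, 1)
            return prefix + completion + suffix
    return body + completion  # no sentinel: the completion is simply the tail

For the record just above, rebuild(prompt, "__create_context") restores the elided method name in submit.py.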
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids is None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # no stored input for this task; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def <|fim_middle|>(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to the backend; rerun the command with the "-r" option if you wish to delete the current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __create_action |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids is None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # no stored input for this task; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def <|fim_middle|>(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to the backend; rerun the command with the "-r" option if you wish to delete the current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __create_navigator |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids is None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # no stored input for this task; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def <|fim_middle|>(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to the backend; rerun the command with the "-r" option if you wish to delete the current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __create_bootstrap |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids is None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # no stored input for this task; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def <|fim_middle|>(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to the backend; rerun the command with the "-r" option if you wish to delete the current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __create_launcher |
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids is None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except Exception:  # no stored input for this task; fall back to an empty input directory
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def <|fim_middle|>(self, launcher_param):
if self.__dry_run:
return
if not self.__resubmit:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to the backend; rerun the command with the "-r" option if you wish to delete the current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not isinstance(result, dict):
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)
<|fim▁end|> | __submit |
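All six submit.py records cut their hole out of the same class, so the intended call pattern is visible even though no driver code is included. A hedged sketch of such a driver follows; the import path and the manager object are assumptions — the records only show that Submit needs a manager exposing the load_* factories, a task id, and the optional sub_ids/dry_run/resubmit flags:

# Sketch only: the module path below is a guess, not confirmed by the records.
from jsub.operations.submit import Submit  # hypothetical import path

def resubmit_task(manager, task_id, sub_ids=None):
    # resubmit=True removes any jobs already recorded on the task before the
    # main/ tree is regenerated and the task is handed back to the backend.
    Submit(manager, task_id, sub_ids=sub_ids, resubmit=True).handle()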
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List, Optional
__version__ = "20.0.dev0"
def main(args=None):
# type: (Optional[List[str]]) -> int
"""This is an internal API only meant for use by pip's own console scripts.<|fim▁hole|> from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args)<|fim▁end|> |
For additional details, see https://github.com/pypa/pip/issues/7498.
""" |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List, Optional
__version__ = "20.0.dev0"
def main(args=None):
# type: (Optional[List[str]]) -> int
<|fim_middle|>
<|fim▁end|> | """This is an internal API only meant for use by pip's own console scripts.
For additional details, see https://github.com/pypa/pip/issues/7498.
"""
from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
<|fim_middle|>
__version__ = "20.0.dev0"
def main(args=None):
# type: (Optional[List[str]]) -> int
"""This is an internal API only meant for use by pip's own console scripts.
For additional details, see https://github.com/pypa/pip/issues/7498.
"""
from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args)
<|fim▁end|> | from typing import List, Optional |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List, Optional
__version__ = "20.0.dev0"
def <|fim_middle|>(args=None):
# type: (Optional[List[str]]) -> int
"""This is an internal API only meant for use by pip's own console scripts.
For additional details, see https://github.com/pypa/pip/issues/7498.
"""
from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args)
<|fim▁end|> | main |
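The four __init__.py records all expose the same internal pip entry point. As its own docstring says, main() exists for pip's console scripts; a sketch of what a call looks like anyway (illustrative only, assuming pip 20.0 is importable):

# Illustrative only: pip explicitly discourages other programs from calling
# this internal API (see https://github.com/pypa/pip/issues/7498).
from pip._internal import main

if __name__ == "__main__":
    raise SystemExit(main(["--version"]))  # main() returns an int exit code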
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Django Views for service status app
"""
from __future__ import absolute_import
import json
import time
from celery.exceptions import TimeoutError
from django.http import HttpResponse
from djcelery import celery
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def index(_):
"""
An empty view
"""
return HttpResponse()
def celery_status(_):
"""
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json")
def celery_ping(_):
"""
A simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
<|fim▁hole|> 'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json")<|fim▁end|> | output = {
'success': success,
'task_id': task_id,
'value': value, |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Django Views for service status app
"""
from __future__ import absolute_import
import json
import time
from celery.exceptions import TimeoutError
from django.http import HttpResponse
from djcelery import celery
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def index(_):
<|fim_middle|>
def celery_status(_):
"""
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json")
def celery_ping(_):
"""
A simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
output = {
'success': success,
'task_id': task_id,
'value': value,
'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json")
<|fim▁end|> | """
An empty view
"""
return HttpResponse() |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Django Views for service status app
"""
from __future__ import absolute_import
import json
import time
from celery.exceptions import TimeoutError
from django.http import HttpResponse
from djcelery import celery
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def index(_):
"""
An empty view
"""
return HttpResponse()
def celery_status(_):
<|fim_middle|>
def celery_ping(_):
"""
A simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
output = {
'success': success,
'task_id': task_id,
'value': value,
'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json")
<|fim▁end|> | """
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json") |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Django Views for service status app
"""
from __future__ import absolute_import
import json
import time
from celery.exceptions import TimeoutError
from django.http import HttpResponse
from djcelery import celery
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def index(_):
"""
An empty view
"""
return HttpResponse()
def celery_status(_):
"""
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json")
def celery_ping(_):
<|fim_middle|>
<|fim▁end|> | """
A simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
output = {
'success': success,
'task_id': task_id,
'value': value,
'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json") |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Django Views for service status app
"""
from __future__ import absolute_import
import json
import time
from celery.exceptions import TimeoutError
from django.http import HttpResponse
from djcelery import celery
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def <|fim_middle|>(_):
"""
An empty view
"""
return HttpResponse()
def celery_status(_):
"""
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json")
def celery_ping(_):
"""
A simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
output = {
'success': success,
'task_id': task_id,
'value': value,
'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json")
<|fim▁end|> | index |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Django Views for service status app
"""
from __future__ import absolute_import
import json
import time
from celery.exceptions import TimeoutError
from django.http import HttpResponse
from djcelery import celery
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def index(_):
"""
An empty view
"""
return HttpResponse()
def <|fim_middle|>(_):
"""
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json")
def celery_ping(_):
"""
A simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
output = {
'success': success,
'task_id': task_id,
'value': value,
'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json")
<|fim▁end|> | celery_status |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Django Views for service status app
"""
from __future__ import absolute_import
import json
import time
from celery.exceptions import TimeoutError
from django.http import HttpResponse
from djcelery import celery
from openedx.core.djangoapps.service_status.tasks import delayed_ping
def index(_):
"""
An empty view
"""
return HttpResponse()
def celery_status(_):
"""
A view that returns Celery stats
"""
stats = celery.control.inspect().stats() or {}
return HttpResponse(json.dumps(stats, indent=4),
content_type="application/json")
def <|fim_middle|>(_):
"""
A simple view that checks if Celery can process a simple task
"""
start = time.time()
result = delayed_ping.apply_async(('ping', 0.1))
task_id = result.id
# Wait until we get the result
try:
value = result.get(timeout=4.0)
success = True
except TimeoutError:
value = None
success = False
output = {
'success': success,
'task_id': task_id,
'value': value,
'time': time.time() - start,
}
return HttpResponse(json.dumps(output, indent=4),
content_type="application/json")
<|fim▁end|> | celery_ping |
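The seven views.py records slice up one small Django status module. A hedged sketch of the URL wiring such views usually receive — the route patterns below are invented for illustration; only the view functions come from the records:

# Sketch of a urls.py for the service-status views; regexes are assumptions.
from django.conf.urls import url
from openedx.core.djangoapps.service_status import views

urlpatterns = [
    url(r'^$', views.index),
    url(r'^celery/$', views.celery_status),
    url(r'^celery/ping/$', views.celery_ping),
]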
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
<|fim▁hole|> if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()<|fim▁end|> | This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
<|fim_middle|>
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno()) |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
<|fim_middle|>
def writeSomeData(self, data):
try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno()) |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
<|fim_middle|>
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
<|fim_middle|>
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno()) |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
<|fim_middle|>
<|fim▁end|> | """I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost() |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
<|fim_middle|>
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | """Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self) |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
rv = os.write(self.fileno(), data)
return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
<|fim_middle|>
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | """Write some data to standard output.
"""
self.writer.write(data) |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
<|fim_middle|>
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | """Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived) |
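writeSomeData in the file above implements the usual non-blocking write contract: return the number of bytes actually written (possibly fewer than requested), return 0 on EAGAIN so the caller retries later, and map EPIPE to a lost connection. Below is a standalone sketch of the same contract on a raw POSIX file descriptor, written in the file's own Python 2 style; the -1 sentinel stands in for CONNECTION_LOST and is an assumption, not Twisted's constant.

import os, errno, fcntl

def set_nonblocking(fd):
    # same effect as the fdesc.setNonBlocking() calls in the module above
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

def write_some(fd, data):
    try:
        return os.write(fd, data)       # may be a short write
    except OSError, ose:
        if ose.errno == errno.EAGAIN:   # kernel buffer full: no progress yet
            return 0
        if ose.errno == errno.EPIPE:    # reader went away: connection lost
            return -1
        raise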
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
<|fim_middle|>
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | """Close standard input.
"""
self.writer.loseConnection() |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
<|fim_middle|>
<|fim▁end|> | """The connection was lost.
"""
self.protocol.connectionLost() |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
<|fim_middle|>
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | return 0 |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
<|fim_middle|>
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | return 0 |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
<|fim_middle|>
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | return CONNECTION_LOST |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
<|fim_middle|>
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | return 0 |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
<|fim_middle|>
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | raise RuntimeError, "Standard IO already in use." |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def <|fim_middle|>(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | __init__ |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def <|fim_middle|>(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | writeSomeData |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def <|fim_middle|>(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | connectionLost |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def <|fim_middle|>(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | __init__ |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def <|fim_middle|>(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | write |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def <|fim_middle|>(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | doRead |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def <|fim_middle|>(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | closeStdin |
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>}
"""
# system imports
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
def writeSomeData(self, data):
try:
            rv = os.write(self.fileno(), data)
            return rv
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def <|fim_middle|>(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
<|fim▁end|> | connectionLost |
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
            <noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """<|fim▁hole|> eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)<|fim▁end|> | dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url) |
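The row above exercises extract_source_url_from_dewplayer(), which pulls the mp3= query parameter out of a dewplayer .swf URL and raises ValueError otherwise. A hedged sketch of that behaviour using Python 2's urlparse; mp3_url_from_dewplayer is a hypothetical stand-in, not csxj's actual helper.

from urlparse import urlparse, parse_qs

def mp3_url_from_dewplayer(swf_url):
    if not swf_url:
        raise ValueError("empty dewplayer URL")
    query = parse_qs(urlparse(swf_url).query)
    if 'mp3' not in query:
        raise ValueError("no mp3= parameter in dewplayer query")
    return query['mp3'][0]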
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
<|fim_middle|>
class TestMediaUtils(object):
def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
            <noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | return BeautifulSoup(html_data) |
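These test rows revolve around extract_tagged_url_from_embedded_script(), whose <noscript> fallback path the first test pins down. Below is a hedged approximation of that path with BeautifulSoup 3, matching the imports in the file above; extract_fallback_url is a hypothetical stand-in for the real csxj helper.

from BeautifulSoup import BeautifulSoup

def extract_fallback_url(html):
    soup = BeautifulSoup(html)
    noscript = soup.find('noscript')
    if noscript is None:
        # mirrors the ValueError the tests expect for scripts with no fallback
        raise ValueError("embedded <script> has no <noscript> fallback")
    return noscript.find('a')['href']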
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
<|fim_middle|>
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
            <noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags) |
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
def setUp(self):
<|fim_middle|>
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
<noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | self.netloc = 'foo.com'
self.internal_sites = {} |
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
<|fim_middle|>
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | """ The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
<noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource") |
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
<noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
<|fim_middle|>
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | """ The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites) |
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
<noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
<|fim_middle|>
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | """ The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags) |
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
<noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
<|fim_middle|>
def test_embedded_tweet_widget_splitted(self):
""" The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | """ The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites) |
<|file_name|>test_media_utils.py<|end_file_name|><|fim▁begin|>"""
Test suite for the embedded <script> extraction
"""
from BeautifulSoup import BeautifulSoup
from nose.tools import raises, eq_
from csxj.datasources.parser_tools import media_utils
from csxj.datasources.parser_tools import twitter_utils
from tests.datasources.parser_tools import test_twitter_utils
def make_soup(html_data):
return BeautifulSoup(html_data)
class TestMediaUtils(object):
def setUp(self):
self.netloc = 'foo.com'
self.internal_sites = {}
def test_embedded_script(self):
""" The embedded <script> extraction works on a simple embedded script with <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
<noscript>
<a href='http://bar.com/some_resource'>Disabled JS, go here</a>
</noscript>
</div>
"""
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
eq_(tagged_URL.URL, "http://bar.com/some_resource")
@raises(ValueError)
def test_embedded_script_without_noscript_fallback(self):
""" The embedded <script> extraction raises a ValueError exception when encountering a script without <noscript> fallback """
html_data = """
<div>
<script src='http://bar.com/some_widget.js'>
</script>
</div>
"""
soup = make_soup(html_data)
media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
def test_embeded_tweet_widget(self):
""" The embedded <script> extraction returns a link to a twitter resource when the script is a twitter widget """
html_data = """
<div>
<script src={0}>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags)
@raises(ValueError)
def test_embedded_javascript_code(self):
""" The embedded <script> extraction raises a ValueError when processing a <script> tag with arbitrary Javascript code inside """
js_content = """<script type='text/javascript'>var pokey='penguin'; </script>"""
soup = make_soup(js_content)
media_utils.extract_tagged_url_from_embedded_script(soup, self.netloc, self.internal_sites)
def test_embedded_tweet_widget_splitted(self):
<|fim_middle|>
class TestDewPlayer(object):
def test_simple_url_extraction(self):
""" media_utils.extract_source_url_from_dewplayer() can extract he url to an mp3 file from an embedded dewplayer object. """
dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?mp3=http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
expected_mp3_url = "http://podcast.dhnet.be/articles/audio_dh_388635_1331708882.mp3"
extracted_url = media_utils.extract_source_url_from_dewplayer(dewplayer_url)
eq_(expected_mp3_url, extracted_url)
@raises(ValueError)
def test_empty_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an empty string """
media_utils.extract_source_url_from_dewplayer("")
@raises(ValueError)
def test_bad_query_url(self):
""" media_utils.extract_source_url_from_dewplayer() raises ValueError when fed an unknown dewplayer query """
wrong_dewplayer_url = "http://download.saipm.com/flash/dewplayer/dewplayer.swf?foo=bar"
media_utils.extract_source_url_from_dewplayer(wrong_dewplayer_url)
<|fim▁end|> | """ The embedded <script> extraction should work when an embedded tweet is split between the widget.js inclusion and the actual javascript code to instantiate it."""
html_data = """
<div>
<script src={0}></script>
<script>
{1}
</script>
</div>
""".format(twitter_utils.TWITTER_WIDGET_SCRIPT_URL, test_twitter_utils.SAMPLE_TWIMG_PROFILE)
soup = make_soup(html_data)
tagged_URL = media_utils.extract_tagged_url_from_embedded_script(soup.script, self.netloc, self.internal_sites)
expected_tags = set(['twitter widget', 'twitter profile', 'script', 'external', 'embedded'])
eq_(tagged_URL.tags, expected_tags) |