index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
69,552 |
mailchimp_marketing.api.surveys_api
|
unpublish_survey_with_http_info
|
Unpublish a Survey # noqa: E501
Unpublish a survey that has been published. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unpublish_survey_with_http_info(list_id, survey_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str list_id: The unique ID for the list. (required)
:param str survey_id: The ID of the survey. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
def unpublish_survey_with_http_info(self, list_id, survey_id, **kwargs):  # noqa: E501
    """Unpublish a Survey  # noqa: E501

    Unpublish a survey that has been published.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unpublish_survey_with_http_info(list_id, survey_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str list_id: The unique ID for the list. (required)
    :param str survey_id: The ID of the survey. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    all_params = ['list_id', 'survey_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments the endpoint does not understand.
    # dict.items() behaves identically on py2/py3; no need for six.iteritems.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unpublish_survey" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'list_id' is set
    if ('list_id' not in params or
            params['list_id'] is None):
        raise ValueError("Missing the required parameter `list_id` when calling `unpublish_survey`")  # noqa: E501
    # verify the required parameter 'survey_id' is set
    if ('survey_id' not in params or
            params['survey_id'] is None):
        raise ValueError("Missing the required parameter `survey_id` when calling `unpublish_survey`")  # noqa: E501

    collection_formats = {}

    # Both ids are substituted into the URL path template by the client.
    path_params = {}
    if 'list_id' in params:
        path_params['list_id'] = params['list_id']  # noqa: E501
    if 'survey_id' in params:
        path_params['survey_id'] = params['survey_id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/lists/{list_id}/surveys/{survey_id}/actions/unpublish', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, list_id, survey_id, **kwargs)
|
69,553 |
mailchimp_marketing.api.template_folders_api
|
TemplateFoldersApi
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
|
class TemplateFoldersApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client):
        # Shared client object that performs the actual HTTP calls.
        self.api_client = api_client

    def remove(self, folder_id, **kwargs):  # noqa: E501
        """Delete template folder  # noqa: E501

        Delete a specific template folder, and mark all the templates in the folder as 'unfiled'.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.remove(folder_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str folder_id: The unique id for the template folder. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.remove_with_http_info(folder_id, **kwargs)  # noqa: E501
        else:
            (data) = self.remove_with_http_info(folder_id, **kwargs)  # noqa: E501
            return data

    def remove_with_http_info(self, folder_id, **kwargs):  # noqa: E501
        """Delete template folder  # noqa: E501

        Delete a specific template folder, and mark all the templates in the folder as 'unfiled'.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.remove_with_http_info(folder_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str folder_id: The unique id for the template folder. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['folder_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # dict.items() behaves identically on py2/py3; no need for six.iteritems.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method remove" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'folder_id' is set
        if ('folder_id' not in params or
                params['folder_id'] is None):
            raise ValueError("Missing the required parameter `folder_id` when calling `remove`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'folder_id' in params:
            path_params['folder_id'] = params['folder_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/template-folders/{folder_id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def list(self, **kwargs):  # noqa: E501
        """List template folders  # noqa: E501

        Get all folders used to organize templates.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
        :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
        :param int count: The number of records to return. Default value is 10. Maximum value is 1000
        :param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
        :return: TemplateFolders
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.list_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.list_with_http_info(**kwargs)  # noqa: E501
            return data

    def list_with_http_info(self, **kwargs):  # noqa: E501
        """List template folders  # noqa: E501

        Get all folders used to organize templates.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
        :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
        :param int count: The number of records to return. Default value is 10. Maximum value is 1000
        :param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
        :return: TemplateFolders
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['fields', 'exclude_fields', 'count', 'offset']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # dict.items() behaves identically on py2/py3; no need for six.iteritems.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list" % key
                )
            params[key] = val
        del params['kwargs']

        # The API caps page size at 1000 records.
        if 'count' in params and params['count'] > 1000:  # noqa: E501
            raise ValueError("Invalid value for parameter `count` when calling `list`, must be a value less than or equal to `1000`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'fields' in params:
            query_params.append(('fields', params['fields']))  # noqa: E501
            collection_formats['fields'] = 'csv'  # noqa: E501
        if 'exclude_fields' in params:
            query_params.append(('exclude_fields', params['exclude_fields']))  # noqa: E501
            collection_formats['exclude_fields'] = 'csv'  # noqa: E501
        if 'count' in params:
            query_params.append(('count', params['count']))  # noqa: E501
        if 'offset' in params:
            query_params.append(('offset', params['offset']))  # noqa: E501

        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/template-folders', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TemplateFolders',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get(self, folder_id, **kwargs):  # noqa: E501
        """Get template folder  # noqa: E501

        Get information about a specific folder used to organize templates.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get(folder_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str folder_id: The unique id for the template folder. (required)
        :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
        :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
        :return: TemplateFolder
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_with_http_info(folder_id, **kwargs)  # noqa: E501
        else:
            (data) = self.get_with_http_info(folder_id, **kwargs)  # noqa: E501
            return data

    def get_with_http_info(self, folder_id, **kwargs):  # noqa: E501
        """Get template folder  # noqa: E501

        Get information about a specific folder used to organize templates.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_with_http_info(folder_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str folder_id: The unique id for the template folder. (required)
        :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
        :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
        :return: TemplateFolder
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['folder_id', 'fields', 'exclude_fields']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # dict.items() behaves identically on py2/py3; no need for six.iteritems.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'folder_id' is set
        if ('folder_id' not in params or
                params['folder_id'] is None):
            raise ValueError("Missing the required parameter `folder_id` when calling `get`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'folder_id' in params:
            path_params['folder_id'] = params['folder_id']  # noqa: E501

        query_params = []
        if 'fields' in params:
            query_params.append(('fields', params['fields']))  # noqa: E501
            collection_formats['fields'] = 'csv'  # noqa: E501
        if 'exclude_fields' in params:
            query_params.append(('exclude_fields', params['exclude_fields']))  # noqa: E501
            collection_formats['exclude_fields'] = 'csv'  # noqa: E501

        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/template-folders/{folder_id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TemplateFolder',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def update(self, folder_id, body, **kwargs):  # noqa: E501
        """Update template folder  # noqa: E501

        Update a specific folder used to organize templates.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update(folder_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str folder_id: The unique id for the template folder. (required)
        :param TemplateFolder2 body: (required)
        :return: TemplateFolder
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.update_with_http_info(folder_id, body, **kwargs)  # noqa: E501
        else:
            (data) = self.update_with_http_info(folder_id, body, **kwargs)  # noqa: E501
            return data

    def update_with_http_info(self, folder_id, body, **kwargs):  # noqa: E501
        """Update template folder  # noqa: E501

        Update a specific folder used to organize templates.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_with_http_info(folder_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str folder_id: The unique id for the template folder. (required)
        :param TemplateFolder2 body: (required)
        :return: TemplateFolder
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['folder_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # dict.items() behaves identically on py2/py3; no need for six.iteritems.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'folder_id' is set
        if ('folder_id' not in params or
                params['folder_id'] is None):
            raise ValueError("Missing the required parameter `folder_id` when calling `update`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'folder_id' in params:
            path_params['folder_id'] = params['folder_id']  # noqa: E501

        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/template-folders/{folder_id}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TemplateFolder',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def create(self, body, **kwargs):  # noqa: E501
        """Add template folder  # noqa: E501

        Create a new template folder.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TemplateFolder1 body: (required)
        :return: TemplateFolder
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.create_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.create_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def create_with_http_info(self, body, **kwargs):  # noqa: E501
        """Add template folder  # noqa: E501

        Create a new template folder.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TemplateFolder1 body: (required)
        :return: TemplateFolder
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # dict.items() behaves identically on py2/py3; no need for six.iteritems.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/template-folders', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TemplateFolder',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
|
(api_client)
|
69,555 |
mailchimp_marketing.api.template_folders_api
|
create
|
Add template folder # noqa: E501
Create a new template folder. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TemplateFolder1 body: (required)
:return: TemplateFolder
If the method is called asynchronously,
returns the request thread.
|
def create(self, body, **kwargs):  # noqa: E501
    """Add template folder  # noqa: E501

    Create a new template folder.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TemplateFolder1 body: (required)
    :return: TemplateFolder
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the payload,
    # not the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async mode: hand back the request thread unchanged.
        return self.create_with_http_info(body, **kwargs)  # noqa: E501
    data = self.create_with_http_info(body, **kwargs)  # noqa: E501
    return data
|
(self, body, **kwargs)
|
69,556 |
mailchimp_marketing.api.template_folders_api
|
create_with_http_info
|
Add template folder # noqa: E501
Create a new template folder. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TemplateFolder1 body: (required)
:return: TemplateFolder
If the method is called asynchronously,
returns the request thread.
|
def create_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add template folder  # noqa: E501

    Create a new template folder.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TemplateFolder1 body: (required)
    :return: TemplateFolder
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied.
    :raises ValueError: if `body` is missing or None.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() behaves identically on py2/py3; no need for six.iteritems.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/template-folders', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateFolder',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, body, **kwargs)
|
69,557 |
mailchimp_marketing.api.template_folders_api
|
get
|
Get template folder # noqa: E501
Get information about a specific folder used to organize templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get(folder_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str folder_id: The unique id for the template folder. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateFolder
If the method is called asynchronously,
returns the request thread.
|
def get(self, folder_id, **kwargs):  # noqa: E501
    """Get template folder  # noqa: E501

    Get information about a specific folder used to organize templates.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get(folder_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_id: The unique id for the template folder. (required)
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :return: TemplateFolder
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the payload,
    # not the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async mode: hand back the request thread unchanged.
        return self.get_with_http_info(folder_id, **kwargs)  # noqa: E501
    data = self.get_with_http_info(folder_id, **kwargs)  # noqa: E501
    return data
|
(self, folder_id, **kwargs)
|
69,558 |
mailchimp_marketing.api.template_folders_api
|
get_with_http_info
|
Get template folder # noqa: E501
Get information about a specific folder used to organize templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_with_http_info(folder_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str folder_id: The unique id for the template folder. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateFolder
If the method is called asynchronously,
returns the request thread.
|
def get_with_http_info(self, folder_id, **kwargs):  # noqa: E501
    """Get template folder  # noqa: E501

    Get information about a specific folder used to organize templates.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_with_http_info(folder_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_id: The unique id for the template folder. (required)
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :return: TemplateFolder
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied.
    :raises ValueError: if `folder_id` is missing or None.
    """
    all_params = ['folder_id', 'fields', 'exclude_fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() behaves identically on py2/py3; no need for six.iteritems.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'folder_id' is set
    if ('folder_id' not in params or
            params['folder_id'] is None):
        raise ValueError("Missing the required parameter `folder_id` when calling `get`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'folder_id' in params:
        path_params['folder_id'] = params['folder_id']  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
        collection_formats['fields'] = 'csv'  # noqa: E501
    if 'exclude_fields' in params:
        query_params.append(('exclude_fields', params['exclude_fields']))  # noqa: E501
        collection_formats['exclude_fields'] = 'csv'  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/template-folders/{folder_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateFolder',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, folder_id, **kwargs)
|
69,559 |
mailchimp_marketing.api.template_folders_api
|
list
|
List template folders # noqa: E501
Get all folders used to organize templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: TemplateFolders
If the method is called asynchronously,
returns the request thread.
|
def list(self, **kwargs):  # noqa: E501
    """List template folders.

    Get all folders used to organize templates. The request is synchronous
    by default; pass ``async_req=True`` to receive the request thread
    instead of the parsed response.

    >>> thread = api.list(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :param int count: The number of records to return. Default value is 10. Maximum value is 1000.
    :param int offset: The number of records from a collection to skip (pagination). Default value is 0.
    :return: TemplateFolders, or the request thread when async_req is set.
    """
    # Callers of this convenience wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    outcome = self.list_with_http_info(**kwargs)  # noqa: E501
    return outcome
|
(self, **kwargs)
|
69,560 |
mailchimp_marketing.api.template_folders_api
|
list_with_http_info
|
List template folders # noqa: E501
Get all folders used to organize templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:return: TemplateFolders
If the method is called asynchronously,
returns the request thread.
|
def list_with_http_info(self, **kwargs):  # noqa: E501
    """List template folders  # noqa: E501

    Get all folders used to organize templates.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :param int count: The number of records to return. Default value is 10. Maximum value is 1000
    :param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
    :return: TemplateFolders
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['fields', 'exclude_fields', 'count', 'offset']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early so typos fail loudly.
    # (dict.items() replaces six.iteritems: this code only runs on py3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list" % key
            )
        params[key] = val
    del params['kwargs']

    # Server-side maximum; validated locally to avoid a wasted round trip.
    if 'count' in params and params['count'] > 1000:  # noqa: E501
        raise ValueError("Invalid value for parameter `count` when calling `list`, must be a value less than or equal to `1000`")  # noqa: E501
    collection_formats = {}

    path_params = {}

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
        collection_formats['fields'] = 'csv'  # noqa: E501
    if 'exclude_fields' in params:
        query_params.append(('exclude_fields', params['exclude_fields']))  # noqa: E501
        collection_formats['exclude_fields'] = 'csv'  # noqa: E501
    if 'count' in params:
        query_params.append(('count', params['count']))  # noqa: E501
    if 'offset' in params:
        query_params.append(('offset', params['offset']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/template-folders', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateFolders',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, **kwargs)
|
69,561 |
mailchimp_marketing.api.template_folders_api
|
remove
|
Delete template folder # noqa: E501
Delete a specific template folder, and mark all the templates in the folder as 'unfiled'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove(folder_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str folder_id: The unique id for the template folder. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
def remove(self, folder_id, **kwargs):  # noqa: E501
    """Delete template folder.

    Delete a specific template folder, and mark all the templates in the
    folder as 'unfiled'. The request is synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.remove(folder_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_id: The unique id for the template folder. (required)
    :return: None, or the request thread when async_req is set.
    """
    # Callers of this convenience wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    outcome = self.remove_with_http_info(folder_id, **kwargs)  # noqa: E501
    return outcome
|
(self, folder_id, **kwargs)
|
69,562 |
mailchimp_marketing.api.template_folders_api
|
remove_with_http_info
|
Delete template folder # noqa: E501
Delete a specific template folder, and mark all the templates in the folder as 'unfiled'. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_with_http_info(folder_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str folder_id: The unique id for the template folder. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
def remove_with_http_info(self, folder_id, **kwargs):  # noqa: E501
    """Delete template folder  # noqa: E501

    Delete a specific template folder, and mark all the templates in the folder as 'unfiled'.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_with_http_info(folder_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_id: The unique id for the template folder. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['folder_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early so typos fail loudly.
    # (dict.items() replaces six.iteritems: this code only runs on py3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'folder_id' is set
    if ('folder_id' not in params or
            params['folder_id'] is None):
        raise ValueError("Missing the required parameter `folder_id` when calling `remove`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'folder_id' in params:
        path_params['folder_id'] = params['folder_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/template-folders/{folder_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, folder_id, **kwargs)
|
69,563 |
mailchimp_marketing.api.template_folders_api
|
update
|
Update template folder # noqa: E501
Update a specific folder used to organize templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update(folder_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str folder_id: The unique id for the template folder. (required)
:param TemplateFolder2 body: (required)
:return: TemplateFolder
If the method is called asynchronously,
returns the request thread.
|
def update(self, folder_id, body, **kwargs):  # noqa: E501
    """Update template folder.

    Update a specific folder used to organize templates. The request is
    synchronous by default; pass ``async_req=True`` to receive the request
    thread instead.

    >>> thread = api.update(folder_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_id: The unique id for the template folder. (required)
    :param TemplateFolder2 body: (required)
    :return: TemplateFolder, or the request thread when async_req is set.
    """
    # Callers of this convenience wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    outcome = self.update_with_http_info(folder_id, body, **kwargs)  # noqa: E501
    return outcome
|
(self, folder_id, body, **kwargs)
|
69,564 |
mailchimp_marketing.api.template_folders_api
|
update_with_http_info
|
Update template folder # noqa: E501
Update a specific folder used to organize templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_with_http_info(folder_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str folder_id: The unique id for the template folder. (required)
:param TemplateFolder2 body: (required)
:return: TemplateFolder
If the method is called asynchronously,
returns the request thread.
|
def update_with_http_info(self, folder_id, body, **kwargs):  # noqa: E501
    """Update template folder  # noqa: E501

    Update a specific folder used to organize templates.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_with_http_info(folder_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_id: The unique id for the template folder. (required)
    :param TemplateFolder2 body: (required)
    :return: TemplateFolder
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['folder_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early so typos fail loudly.
    # (dict.items() replaces six.iteritems: this code only runs on py3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'folder_id' is set
    if ('folder_id' not in params or
            params['folder_id'] is None):
        raise ValueError("Missing the required parameter `folder_id` when calling `update`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'folder_id' in params:
        path_params['folder_id'] = params['folder_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/template-folders/{folder_id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateFolder',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, folder_id, body, **kwargs)
|
69,565 |
mailchimp_marketing.api.templates_api
|
TemplatesApi
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
|
class TemplatesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
    def __init__(self, api_client):
        # Keep a reference to the configured client; every endpoint method
        # below delegates its HTTP work to self.api_client.call_api.
        self.api_client = api_client
def delete_template(self, template_id, **kwargs): # noqa: E501
"""Delete template # noqa: E501
Delete a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_template(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_template_with_http_info(template_id, **kwargs) # noqa: E501
else:
(data) = self.delete_template_with_http_info(template_id, **kwargs) # noqa: E501
return data
def delete_template_with_http_info(self, template_id, **kwargs): # noqa: E501
"""Delete template # noqa: E501
Delete a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_template_with_http_info(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['template_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_template" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'template_id' is set
if ('template_id' not in params or
params['template_id'] is None):
raise ValueError("Missing the required parameter `template_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'template_id' in params:
path_params['template_id'] = params['template_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/templates/{template_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list(self, **kwargs): # noqa: E501
"""List templates # noqa: E501
Get a list of an account's available templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this it the number of records from a collection to skip. Default value is 0.
:param str created_by: The Mailchimp account user who created the template.
:param str since_date_created: Restrict the response to templates created after the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str before_date_created: Restrict the response to templates created before the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str type: Limit results based on template type.
:param str category: Limit results based on category.
:param str folder_id: The unique folder id.
:param str sort_field: Returns user templates sorted by the specified field.
:param str sort_dir: Determines the order direction for sorted results.
:return: Templates
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_with_http_info(**kwargs) # noqa: E501
return data
def list_with_http_info(self, **kwargs): # noqa: E501
"""List templates # noqa: E501
Get a list of an account's available templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this it the number of records from a collection to skip. Default value is 0.
:param str created_by: The Mailchimp account user who created the template.
:param str since_date_created: Restrict the response to templates created after the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str before_date_created: Restrict the response to templates created before the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str type: Limit results based on template type.
:param str category: Limit results based on category.
:param str folder_id: The unique folder id.
:param str sort_field: Returns user templates sorted by the specified field.
:param str sort_dir: Determines the order direction for sorted results.
:return: Templates
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'exclude_fields', 'count', 'offset', 'created_by', 'since_date_created', 'before_date_created', 'type', 'category', 'folder_id', 'sort_field', 'sort_dir'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list" % key
)
params[key] = val
del params['kwargs']
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'created_by' in params:
query_params.append(('created_by', params['created_by'])) # noqa: E501
if 'since_date_created' in params:
query_params.append(('since_date_created', params['since_date_created'])) # noqa: E501
if 'before_date_created' in params:
query_params.append(('before_date_created', params['before_date_created'])) # noqa: E501
if 'type' in params:
query_params.append(('type', params['type'])) # noqa: E501
if 'category' in params:
query_params.append(('category', params['category'])) # noqa: E501
if 'folder_id' in params:
query_params.append(('folder_id', params['folder_id'])) # noqa: E501
if 'sort_field' in params:
query_params.append(('sort_field', params['sort_field'])) # noqa: E501
if 'sort_dir' in params:
query_params.append(('sort_dir', params['sort_dir'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/templates', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Templates', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_template(self, template_id, **kwargs): # noqa: E501
"""Get template info # noqa: E501
Get information about a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_template(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_template_with_http_info(template_id, **kwargs) # noqa: E501
else:
(data) = self.get_template_with_http_info(template_id, **kwargs) # noqa: E501
return data
def get_template_with_http_info(self, template_id, **kwargs): # noqa: E501
"""Get template info # noqa: E501
Get information about a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_template_with_http_info(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['template_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_template" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'template_id' is set
if ('template_id' not in params or
params['template_id'] is None):
raise ValueError("Missing the required parameter `template_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'template_id' in params:
path_params['template_id'] = params['template_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/templates/{template_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TemplateInstance', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_default_content_for_template(self, template_id, **kwargs): # noqa: E501
"""View default content # noqa: E501
Get the sections that you can edit in a template, including each section's default content. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_default_content_for_template(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateDefaultContent
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_default_content_for_template_with_http_info(template_id, **kwargs) # noqa: E501
else:
(data) = self.get_default_content_for_template_with_http_info(template_id, **kwargs) # noqa: E501
return data
def get_default_content_for_template_with_http_info(self, template_id, **kwargs): # noqa: E501
"""View default content # noqa: E501
Get the sections that you can edit in a template, including each section's default content. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_default_content_for_template_with_http_info(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateDefaultContent
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['template_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_default_content_for_template" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'template_id' is set
if ('template_id' not in params or
params['template_id'] is None):
raise ValueError("Missing the required parameter `template_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'template_id' in params:
path_params['template_id'] = params['template_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/templates/{template_id}/default-content', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TemplateDefaultContent', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_template(self, template_id, body, **kwargs):  # noqa: E501
    """Update template  # noqa: E501

    Update the name, HTML, or `folder_id` of an existing template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_template(template_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param TemplateInstance2 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always receive just the payload,
    # never the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand the request thread straight back.
        return self.update_template_with_http_info(template_id, body, **kwargs)  # noqa: E501
    # Synchronous mode: block until the deserialized body is available.
    return self.update_template_with_http_info(template_id, body, **kwargs)  # noqa: E501
def update_template_with_http_info(self, template_id, body, **kwargs):  # noqa: E501
    """Update template  # noqa: E501

    Update the name, HTML, or `folder_id` of an existing template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_template_with_http_info(template_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param TemplateInstance2 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['template_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params or
            params['template_id'] is None):
        # Fixed: the generated message previously had an empty method name.
        raise ValueError("Missing the required parameter `template_id` when calling `update_template`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_template`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'template_id' in params:
        path_params['template_id'] = params['template_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates/{template_id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateInstance',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create(self, body, **kwargs):  # noqa: E501
    """Add template  # noqa: E501

    Create a new template for the account. Only Classic templates are supported.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TemplateInstance1 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always ask the low-level call for just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread immediately.
        return self.create_with_http_info(body, **kwargs)  # noqa: E501
    # Synchronous mode: block and return the deserialized body.
    return self.create_with_http_info(body, **kwargs)  # noqa: E501
def create_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add template  # noqa: E501

    Create a new template for the account. Only Classic templates are supported.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TemplateInstance1 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        # Fixed: the generated message previously had an empty method name.
        raise ValueError("Missing the required parameter `body` when calling `create`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateInstance',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(api_client)
|
69,567 |
mailchimp_marketing.api.templates_api
|
create
|
Add template # noqa: E501
Create a new template for the account. Only Classic templates are supported. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TemplateInstance1 body: (required)
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
|
def create(self, body, **kwargs):  # noqa: E501
    """Add template  # noqa: E501

    Create a new template for the account. Only Classic templates are supported.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TemplateInstance1 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper get the payload only, never the full
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    asynchronous = kwargs.get('async_req')
    result = self.create_with_http_info(body, **kwargs)  # noqa: E501
    # In async mode `result` is the request thread; in sync mode it is
    # already the deserialized response body.
    return result if asynchronous else result
|
(self, body, **kwargs)
|
69,568 |
mailchimp_marketing.api.templates_api
|
create_with_http_info
|
Add template # noqa: E501
Create a new template for the account. Only Classic templates are supported. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TemplateInstance1 body: (required)
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
|
def create_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add template  # noqa: E501

    Create a new template for the account. Only Classic templates are supported.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param TemplateInstance1 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        # Fixed: the generated message previously had an empty method name.
        raise ValueError("Missing the required parameter `body` when calling `create`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateInstance',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, body, **kwargs)
|
69,569 |
mailchimp_marketing.api.templates_api
|
delete_template
|
Delete template # noqa: E501
Delete a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_template(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
def delete_template(self, template_id, **kwargs):  # noqa: E501
    """Delete template  # noqa: E501

    Delete a specific template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_template(template_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the HTTP metadata; this wrapper yields only the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread.
        return self.delete_template_with_http_info(template_id, **kwargs)  # noqa: E501
    # Synchronous mode: block until the request completes.
    return self.delete_template_with_http_info(template_id, **kwargs)  # noqa: E501
|
(self, template_id, **kwargs)
|
69,570 |
mailchimp_marketing.api.templates_api
|
delete_template_with_http_info
|
Delete template # noqa: E501
Delete a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_template_with_http_info(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
def delete_template_with_http_info(self, template_id, **kwargs):  # noqa: E501
    """Delete template  # noqa: E501

    Delete a specific template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_template_with_http_info(template_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['template_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params or
            params['template_id'] is None):
        # Fixed: the generated message previously had an empty method name.
        raise ValueError("Missing the required parameter `template_id` when calling `delete_template`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'template_id' in params:
        path_params['template_id'] = params['template_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates/{template_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, template_id, **kwargs)
|
69,571 |
mailchimp_marketing.api.templates_api
|
get_default_content_for_template
|
View default content # noqa: E501
Get the sections that you can edit in a template, including each section's default content. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_default_content_for_template(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateDefaultContent
If the method is called asynchronously,
returns the request thread.
|
def get_default_content_for_template(self, template_id, **kwargs):  # noqa: E501
    """View default content  # noqa: E501

    Get the sections that you can edit in a template, including each section's default content.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_default_content_for_template(template_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :return: TemplateDefaultContent
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is returned from this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: the caller receives the request thread.
        return self.get_default_content_for_template_with_http_info(template_id, **kwargs)  # noqa: E501
    # Synchronous mode: block and return the deserialized body.
    return self.get_default_content_for_template_with_http_info(template_id, **kwargs)  # noqa: E501
|
(self, template_id, **kwargs)
|
69,572 |
mailchimp_marketing.api.templates_api
|
get_default_content_for_template_with_http_info
|
View default content # noqa: E501
Get the sections that you can edit in a template, including each section's default content. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_default_content_for_template_with_http_info(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateDefaultContent
If the method is called asynchronously,
returns the request thread.
|
def get_default_content_for_template_with_http_info(self, template_id, **kwargs):  # noqa: E501
    """View default content  # noqa: E501

    Get the sections that you can edit in a template, including each section's default content.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_default_content_for_template_with_http_info(template_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :return: TemplateDefaultContent
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['template_id', 'fields', 'exclude_fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_default_content_for_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params or
            params['template_id'] is None):
        # Fixed: the generated message previously had an empty method name.
        raise ValueError("Missing the required parameter `template_id` when calling `get_default_content_for_template`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'template_id' in params:
        path_params['template_id'] = params['template_id']  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
        collection_formats['fields'] = 'csv'  # noqa: E501
    if 'exclude_fields' in params:
        query_params.append(('exclude_fields', params['exclude_fields']))  # noqa: E501
        collection_formats['exclude_fields'] = 'csv'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates/{template_id}/default-content', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateDefaultContent',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, template_id, **kwargs)
|
69,573 |
mailchimp_marketing.api.templates_api
|
get_template
|
Get template info # noqa: E501
Get information about a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_template(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
|
def get_template(self, template_id, **kwargs):  # noqa: E501
    """Get template info  # noqa: E501

    Get information about a specific template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_template(template_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always strips HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread to the caller.
        return self.get_template_with_http_info(template_id, **kwargs)  # noqa: E501
    # Synchronous mode: block and return the deserialized body.
    return self.get_template_with_http_info(template_id, **kwargs)  # noqa: E501
|
(self, template_id, **kwargs)
|
69,574 |
mailchimp_marketing.api.templates_api
|
get_template_with_http_info
|
Get template info # noqa: E501
Get information about a specific template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_template_with_http_info(template_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
|
def get_template_with_http_info(self, template_id, **kwargs):  # noqa: E501
    """Get template info  # noqa: E501

    Get information about a specific template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_template_with_http_info(template_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['template_id', 'fields', 'exclude_fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params or
            params['template_id'] is None):
        # Fixed: the generated message previously had an empty method name.
        raise ValueError("Missing the required parameter `template_id` when calling `get_template`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'template_id' in params:
        path_params['template_id'] = params['template_id']  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
        collection_formats['fields'] = 'csv'  # noqa: E501
    if 'exclude_fields' in params:
        query_params.append(('exclude_fields', params['exclude_fields']))  # noqa: E501
        collection_formats['exclude_fields'] = 'csv'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates/{template_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateInstance',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, template_id, **kwargs)
|
69,575 |
mailchimp_marketing.api.templates_api
|
list
|
List templates # noqa: E501
Get a list of an account's available templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str created_by: The Mailchimp account user who created the template.
:param str since_date_created: Restrict the response to templates created after the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str before_date_created: Restrict the response to templates created before the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str type: Limit results based on template type.
:param str category: Limit results based on category.
:param str folder_id: The unique folder id.
:param str sort_field: Returns user templates sorted by the specified field.
:param str sort_dir: Determines the order direction for sorted results.
:return: Templates
If the method is called asynchronously,
returns the request thread.
|
def list(self, **kwargs):  # noqa: E501
    """List templates  # noqa: E501

    Get a list of an account's available templates.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :param int count: The number of records to return. Default value is 10. Maximum value is 1000
    :param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
    :param str created_by: The Mailchimp account user who created the template.
    :param str since_date_created: Restrict the response to templates created after the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
    :param str before_date_created: Restrict the response to templates created before the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
    :param str type: Limit results based on template type.
    :param str category: Limit results based on category.
    :param str folder_id: The unique folder id.
    :param str sort_field: Returns user templates sorted by the specified field.
    :param str sort_dir: Determines the order direction for sorted results.
    :return: Templates
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper yields only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread immediately.
        return self.list_with_http_info(**kwargs)  # noqa: E501
    # Synchronous mode: block and return the Templates collection.
    return self.list_with_http_info(**kwargs)  # noqa: E501
|
(self, **kwargs)
|
69,576 |
mailchimp_marketing.api.templates_api
|
list_with_http_info
|
List templates # noqa: E501
Get a list of an account's available templates. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. Default value is 10. Maximum value is 1000
:param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
:param str created_by: The Mailchimp account user who created the template.
:param str since_date_created: Restrict the response to templates created after the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str before_date_created: Restrict the response to templates created before the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str type: Limit results based on template type.
:param str category: Limit results based on category.
:param str folder_id: The unique folder id.
:param str sort_field: Returns user templates sorted by the specified field.
:param str sort_dir: Determines the order direction for sorted results.
:return: Templates
If the method is called asynchronously,
returns the request thread.
|
def list_with_http_info(self, **kwargs):  # noqa: E501
    """List templates # noqa: E501

    Get a list of an account's available templates. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
    :param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
    :param int count: The number of records to return. Default value is 10. Maximum value is 1000
    :param int offset: Used for [pagination](https://mailchimp.com/developer/marketing/docs/methods-parameters/#pagination), this is the number of records from a collection to skip. Default value is 0.
    :param str created_by: The Mailchimp account user who created the template.
    :param str since_date_created: Restrict the response to templates created after the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
    :param str before_date_created: Restrict the response to templates created before the set date. Uses ISO 8601 time format: 2015-10-21T15:41:36+00:00.
    :param str type: Limit results based on template type.
    :param str category: Limit results based on category.
    :param str folder_id: The unique folder id.
    :param str sort_field: Returns user templates sorted by the specified field.
    :param str sort_dir: Determines the order direction for sorted results.
    :return: Templates
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `count` exceeds the API maximum of 1000.
    """
    all_params = ['fields', 'exclude_fields', 'count', 'offset', 'created_by', 'since_date_created', 'before_date_created', 'type', 'category', 'folder_id', 'sort_field', 'sort_dir']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early instead of silently ignoring them.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list" % key
            )
        params[key] = val
    del params['kwargs']

    # The API caps page size at 1000 records; fail fast on the client side.
    if 'count' in params and params['count'] > 1000:  # noqa: E501
        raise ValueError("Invalid value for parameter `count` when calling `list`, must be a value less than or equal to `1000`")  # noqa: E501
    collection_formats = {}

    path_params = {}

    # Only forward parameters the caller actually supplied; list-valued
    # parameters are serialized as comma-separated values ('csv').
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
        collection_formats['fields'] = 'csv'  # noqa: E501
    if 'exclude_fields' in params:
        query_params.append(('exclude_fields', params['exclude_fields']))  # noqa: E501
        collection_formats['exclude_fields'] = 'csv'  # noqa: E501
    if 'count' in params:
        query_params.append(('count', params['count']))  # noqa: E501
    if 'offset' in params:
        query_params.append(('offset', params['offset']))  # noqa: E501
    if 'created_by' in params:
        query_params.append(('created_by', params['created_by']))  # noqa: E501
    if 'since_date_created' in params:
        query_params.append(('since_date_created', params['since_date_created']))  # noqa: E501
    if 'before_date_created' in params:
        query_params.append(('before_date_created', params['before_date_created']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'category' in params:
        query_params.append(('category', params['category']))  # noqa: E501
    if 'folder_id' in params:
        query_params.append(('folder_id', params['folder_id']))  # noqa: E501
    if 'sort_field' in params:
        query_params.append(('sort_field', params['sort_field']))  # noqa: E501
    if 'sort_dir' in params:
        query_params.append(('sort_dir', params['sort_dir']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Templates',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, **kwargs)
|
69,577 |
mailchimp_marketing.api.templates_api
|
update_template
|
Update template # noqa: E501
Update the name, HTML, or `folder_id` of an existing template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_template(template_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param TemplateInstance2 body: (required)
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
|
def update_template(self, template_id, body, **kwargs):  # noqa: E501
    """Update template # noqa: E501

    Update the name, HTML, or `folder_id` of an existing template. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_template(template_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param TemplateInstance2 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap the HTTP response so the caller
    # gets the deserialized payload (or, with async_req, the request thread).
    kwargs['_return_http_data_only'] = True
    result = self.update_template_with_http_info(template_id, body, **kwargs)  # noqa: E501
    return result
|
(self, template_id, body, **kwargs)
|
69,578 |
mailchimp_marketing.api.templates_api
|
update_template_with_http_info
|
Update template # noqa: E501
Update the name, HTML, or `folder_id` of an existing template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_template_with_http_info(template_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str template_id: The unique id for the template. (required)
:param TemplateInstance2 body: (required)
:return: TemplateInstance
If the method is called asynchronously,
returns the request thread.
|
def update_template_with_http_info(self, template_id, body, **kwargs):  # noqa: E501
    """Update template # noqa: E501

    Update the name, HTML, or `folder_id` of an existing template. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_template_with_http_info(template_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str template_id: The unique id for the template. (required)
    :param TemplateInstance2 body: (required)
    :return: TemplateInstance
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `template_id` or `body` is missing/None.
    """
    all_params = ['template_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early instead of silently ignoring them.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params or
            params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update_template`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_template`")  # noqa: E501

    collection_formats = {}

    # template_id is interpolated into the URL path, not the query string.
    path_params = {}
    if 'template_id' in params:
        path_params['template_id'] = params['template_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/templates/{template_id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TemplateInstance',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, template_id, body, **kwargs)
|
69,579 |
mailchimp_marketing.api.verified_domains_api
|
VerifiedDomainsApi
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
|
class VerifiedDomainsApi(object):
    """Client for the Mailchimp Marketing `/verified-domains` endpoints.

    NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client):
        # Shared ApiClient that handles serialization, auth and the HTTP call.
        self.api_client = api_client

    def create_verified_domain(self, body, **kwargs):  # noqa: E501
        """Add domain to account # noqa: E501

        Add a domain to the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_verified_domain(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param VerifiedDomains2 body: (required)
        :return: VerifiedDomains
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return the deserialized payload only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.create_verified_domain_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.create_verified_domain_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def create_verified_domain_with_http_info(self, body, **kwargs):  # noqa: E501
        """Add domain to account # noqa: E501

        Add a domain to the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_verified_domain_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param VerifiedDomains2 body: (required)
        :return: VerifiedDomains
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unsupported keyword arguments instead of silently ignoring them.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_verified_domain" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling ``")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/verified-domains', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='VerifiedDomains',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_domain(self, domain_name, **kwargs):  # noqa: E501
        """Delete domain # noqa: E501

        Delete a verified domain from the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_domain(domain_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str domain_name: The domain name. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return the deserialized payload only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_domain_with_http_info(domain_name, **kwargs)  # noqa: E501
        else:
            (data) = self.delete_domain_with_http_info(domain_name, **kwargs)  # noqa: E501
            return data

    def delete_domain_with_http_info(self, domain_name, **kwargs):  # noqa: E501
        """Delete domain # noqa: E501

        Delete a verified domain from the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_domain_with_http_info(domain_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str domain_name: The domain name. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['domain_name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unsupported keyword arguments instead of silently ignoring them.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_domain" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'domain_name' is set
        if ('domain_name' not in params or
                params['domain_name'] is None):
            raise ValueError("Missing the required parameter `domain_name` when calling ``")  # noqa: E501

        collection_formats = {}

        # domain_name is interpolated into the URL path.
        path_params = {}
        if 'domain_name' in params:
            path_params['domain_name'] = params['domain_name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/verified-domains/{domain_name}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_domain(self, domain_name, **kwargs):  # noqa: E501
        """Get domain info # noqa: E501

        Get the details for a single domain on the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_domain(domain_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str domain_name: The domain name. (required)
        :return: VerifiedDomains
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return the deserialized payload only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_domain_with_http_info(domain_name, **kwargs)  # noqa: E501
        else:
            (data) = self.get_domain_with_http_info(domain_name, **kwargs)  # noqa: E501
            return data

    def get_domain_with_http_info(self, domain_name, **kwargs):  # noqa: E501
        """Get domain info # noqa: E501

        Get the details for a single domain on the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_domain_with_http_info(domain_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str domain_name: The domain name. (required)
        :return: VerifiedDomains
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['domain_name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unsupported keyword arguments instead of silently ignoring them.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_domain" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'domain_name' is set
        if ('domain_name' not in params or
                params['domain_name'] is None):
            raise ValueError("Missing the required parameter `domain_name` when calling ``")  # noqa: E501

        collection_formats = {}

        # domain_name is interpolated into the URL path.
        path_params = {}
        if 'domain_name' in params:
            path_params['domain_name'] = params['domain_name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/verified-domains/{domain_name}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='VerifiedDomains',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_verified_domains_all(self, **kwargs):  # noqa: E501
        """List sending domains # noqa: E501

        Get all of the sending domains on the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_verified_domains_all(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: VerifiedDomains1
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return the deserialized payload only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_verified_domains_all_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_verified_domains_all_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_verified_domains_all_with_http_info(self, **kwargs):  # noqa: E501
        """List sending domains # noqa: E501

        Get all of the sending domains on the account. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_verified_domains_all_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: VerifiedDomains1
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unsupported keyword arguments instead of silently ignoring them.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_verified_domains_all" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/verified-domains', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='VerifiedDomains1',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def submit_domain_verification(self, domain_name, body, **kwargs):  # noqa: E501
        """Verify domain # noqa: E501

        Verify a domain for sending. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.submit_domain_verification(domain_name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str domain_name: The domain name. (required)
        :param VerifyADomainForSending_ body: (required)
        :return: VerifiedDomains
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return the deserialized payload only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.submit_domain_verification_with_http_info(domain_name, body, **kwargs)  # noqa: E501
        else:
            (data) = self.submit_domain_verification_with_http_info(domain_name, body, **kwargs)  # noqa: E501
            return data

    def submit_domain_verification_with_http_info(self, domain_name, body, **kwargs):  # noqa: E501
        """Verify domain # noqa: E501

        Verify a domain for sending. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.submit_domain_verification_with_http_info(domain_name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str domain_name: The domain name. (required)
        :param VerifyADomainForSending_ body: (required)
        :return: VerifiedDomains
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['domain_name', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unsupported keyword arguments instead of silently ignoring them.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method submit_domain_verification" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'domain_name' is set
        if ('domain_name' not in params or
                params['domain_name'] is None):
            raise ValueError("Missing the required parameter `domain_name` when calling ``")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling ``")  # noqa: E501

        collection_formats = {}

        # domain_name is interpolated into the URL path.
        path_params = {}
        if 'domain_name' in params:
            path_params['domain_name'] = params['domain_name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/problem+json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['basicAuth']  # noqa: E501

        return self.api_client.call_api(
            '/verified-domains/{domain_name}/actions/verify', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='VerifiedDomains',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
|
(api_client)
|
69,581 |
mailchimp_marketing.api.verified_domains_api
|
create_verified_domain
|
Add domain to account # noqa: E501
Add a domain to the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_verified_domain(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param VerifiedDomains2 body: (required)
:return: VerifiedDomains
If the method is called asynchronously,
returns the request thread.
|
def create_verified_domain(self, body, **kwargs):  # noqa: E501
    """Add domain to account # noqa: E501

    Add a domain to the account. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_verified_domain(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param VerifiedDomains2 body: (required)
    :return: VerifiedDomains
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap the HTTP response so the caller
    # gets the deserialized payload (or, with async_req, the request thread).
    kwargs['_return_http_data_only'] = True
    result = self.create_verified_domain_with_http_info(body, **kwargs)  # noqa: E501
    return result
|
(self, body, **kwargs)
|
69,582 |
mailchimp_marketing.api.verified_domains_api
|
create_verified_domain_with_http_info
|
Add domain to account # noqa: E501
Add a domain to the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_verified_domain_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param VerifiedDomains2 body: (required)
:return: VerifiedDomains
If the method is called asynchronously,
returns the request thread.
|
def create_verified_domain_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add domain to account # noqa: E501

    Add a domain to the account. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_verified_domain_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param VerifiedDomains2 body: (required)
    :return: VerifiedDomains
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `body` is missing/None.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early instead of silently ignoring them.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_verified_domain" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_verified_domain`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/verified-domains', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VerifiedDomains',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, body, **kwargs)
|
69,583 |
mailchimp_marketing.api.verified_domains_api
|
delete_domain
|
Delete domain # noqa: E501
Delete a verified domain from the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_domain(domain_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str domain_name: The domain name. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
def delete_domain(self, domain_name, **kwargs):  # noqa: E501
    """Delete domain # noqa: E501

    Delete a verified domain from the account. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_domain(domain_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str domain_name: The domain name. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap the HTTP response so the caller
    # gets the deserialized payload (or, with async_req, the request thread).
    kwargs['_return_http_data_only'] = True
    result = self.delete_domain_with_http_info(domain_name, **kwargs)  # noqa: E501
    return result
|
(self, domain_name, **kwargs)
|
69,584 |
mailchimp_marketing.api.verified_domains_api
|
delete_domain_with_http_info
|
Delete domain # noqa: E501
Delete a verified domain from the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_domain_with_http_info(domain_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str domain_name: The domain name. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
def delete_domain_with_http_info(self, domain_name, **kwargs):  # noqa: E501
    """Delete domain  # noqa: E501

    Delete a verified domain from the account.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_domain_with_http_info(domain_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str domain_name: The domain name. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unexpected keyword argument.
    :raises ValueError: when `domain_name` is missing.
    """

    all_params = ['domain_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Plain dict.items() — this client targets Python 3, the six shim is
    # unnecessary here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_domain" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'domain_name' is set
    if ('domain_name' not in params or
            params['domain_name'] is None):
        # Fixed: the generated message previously omitted the method name
        # ("when calling ``").
        raise ValueError("Missing the required parameter `domain_name` when calling `delete_domain`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'domain_name' in params:
        path_params['domain_name'] = params['domain_name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/verified-domains/{domain_name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, domain_name, **kwargs)
|
69,585 |
mailchimp_marketing.api.verified_domains_api
|
get_domain
|
Get domain info # noqa: E501
Get the details for a single domain on the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_domain(domain_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str domain_name: The domain name. (required)
:return: VerifiedDomains
If the method is called asynchronously,
returns the request thread.
|
def get_domain(self, domain_name, **kwargs):  # noqa: E501
    """Get domain info  # noqa: E501

    Get the details for a single domain on the account.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_domain(domain_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str domain_name: The domain name. (required)
    :return: VerifiedDomains
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request just the payload; the *_with_http_info variant returns
    # either the data (sync) or the request thread (async) directly.
    kwargs['_return_http_data_only'] = True
    return self.get_domain_with_http_info(domain_name, **kwargs)  # noqa: E501
|
(self, domain_name, **kwargs)
|
69,586 |
mailchimp_marketing.api.verified_domains_api
|
get_domain_with_http_info
|
Get domain info # noqa: E501
Get the details for a single domain on the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_domain_with_http_info(domain_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str domain_name: The domain name. (required)
:return: VerifiedDomains
If the method is called asynchronously,
returns the request thread.
|
def get_domain_with_http_info(self, domain_name, **kwargs):  # noqa: E501
    """Get domain info  # noqa: E501

    Get the details for a single domain on the account.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_domain_with_http_info(domain_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str domain_name: The domain name. (required)
    :return: VerifiedDomains
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unexpected keyword argument.
    :raises ValueError: when `domain_name` is missing.
    """

    all_params = ['domain_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Plain dict.items() — this client targets Python 3, the six shim is
    # unnecessary here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_domain" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'domain_name' is set
    if ('domain_name' not in params or
            params['domain_name'] is None):
        # Fixed: the generated message previously omitted the method name
        # ("when calling ``").
        raise ValueError("Missing the required parameter `domain_name` when calling `get_domain`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'domain_name' in params:
        path_params['domain_name'] = params['domain_name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/verified-domains/{domain_name}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VerifiedDomains',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, domain_name, **kwargs)
|
69,587 |
mailchimp_marketing.api.verified_domains_api
|
get_verified_domains_all
|
List sending domains # noqa: E501
Get all of the sending domains on the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_verified_domains_all(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: VerifiedDomains1
If the method is called asynchronously,
returns the request thread.
|
def get_verified_domains_all(self, **kwargs):  # noqa: E501
    """List sending domains  # noqa: E501

    Get all of the sending domains on the account.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_verified_domains_all(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: VerifiedDomains1
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request just the payload; the *_with_http_info variant returns
    # either the data (sync) or the request thread (async) directly.
    kwargs['_return_http_data_only'] = True
    return self.get_verified_domains_all_with_http_info(**kwargs)  # noqa: E501
|
(self, **kwargs)
|
69,588 |
mailchimp_marketing.api.verified_domains_api
|
get_verified_domains_all_with_http_info
|
List sending domains # noqa: E501
Get all of the sending domains on the account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_verified_domains_all_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: VerifiedDomains1
If the method is called asynchronously,
returns the request thread.
|
def get_verified_domains_all_with_http_info(self, **kwargs):  # noqa: E501
    """List sending domains  # noqa: E501

    Get all of the sending domains on the account.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_verified_domains_all_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: VerifiedDomains1
             If the method is called asynchronously,
             returns the request thread.
    """

    # Only the framework-level keywords are accepted; the endpoint itself
    # takes no parameters.
    all_params = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_verified_domains_all" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/problem+json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/verified-domains', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VerifiedDomains1',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, **kwargs)
|
69,589 |
mailchimp_marketing.api.verified_domains_api
|
submit_domain_verification
|
Verify domain # noqa: E501
Verify a domain for sending. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.submit_domain_verification(domain_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str domain_name: The domain name. (required)
:param VerifyADomainForSending_ body: (required)
:return: VerifiedDomains
If the method is called asynchronously,
returns the request thread.
|
def submit_domain_verification(self, domain_name, body, **kwargs):  # noqa: E501
    """Verify domain  # noqa: E501

    Verify a domain for sending.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.submit_domain_verification(domain_name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str domain_name: The domain name. (required)
    :param VerifyADomainForSending_ body: (required)
    :return: VerifiedDomains
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request just the payload; the *_with_http_info variant returns
    # either the data (sync) or the request thread (async) directly.
    kwargs['_return_http_data_only'] = True
    return self.submit_domain_verification_with_http_info(domain_name, body, **kwargs)  # noqa: E501
|
(self, domain_name, body, **kwargs)
|
69,590 |
mailchimp_marketing.api.verified_domains_api
|
submit_domain_verification_with_http_info
|
Verify domain # noqa: E501
Verify a domain for sending. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.submit_domain_verification_with_http_info(domain_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str domain_name: The domain name. (required)
:param VerifyADomainForSending_ body: (required)
:return: VerifiedDomains
If the method is called asynchronously,
returns the request thread.
|
def submit_domain_verification_with_http_info(self, domain_name, body, **kwargs):  # noqa: E501
    """Verify domain  # noqa: E501

    Verify a domain for sending.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.submit_domain_verification_with_http_info(domain_name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str domain_name: The domain name. (required)
    :param VerifyADomainForSending_ body: (required)
    :return: VerifiedDomains
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unexpected keyword argument.
    :raises ValueError: when `domain_name` or `body` is missing.
    """

    all_params = ['domain_name', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Plain dict.items() — this client targets Python 3, the six shim is
    # unnecessary here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method submit_domain_verification" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'domain_name' is set
    if ('domain_name' not in params or
            params['domain_name'] is None):
        # Fixed: the generated message previously omitted the method name
        # ("when calling ``").
        raise ValueError("Missing the required parameter `domain_name` when calling `submit_domain_verification`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `submit_domain_verification`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'domain_name' in params:
        path_params['domain_name'] = params['domain_name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/verified-domains/{domain_name}/actions/verify', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VerifiedDomains',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
|
(self, domain_name, body, **kwargs)
|
69,594 |
yagmail.sender
|
SMTP
|
:class:`yagmail.SMTP` is a magic wrapper around
``smtplib``'s SMTP connection, and allows messages to be sent.
|
class SMTP:
    """:class:`yagmail.SMTP` is a magic wrapper around
    ``smtplib``'s SMTP connection, and allows messages to be sent."""

    def __init__(
        self,
        user=None,
        password=None,
        host="smtp.gmail.com",
        port=None,
        smtp_starttls=None,
        smtp_ssl=True,
        smtp_set_debuglevel=0,
        smtp_skip_login=False,
        encoding="utf-8",
        oauth2_file=None,
        soft_email_validation=True,
        dkim=None,
        **kwargs
    ):
        """Configure the sender; extra ``**kwargs`` go to smtplib's constructor."""
        self.log = get_logger()
        self.set_logging()
        self.soft_email_validation = soft_email_validation
        if oauth2_file is not None:
            oauth2_info = get_oauth2_info(oauth2_file, user)
            if user is None:
                user = oauth2_info["email_address"]
        if smtp_skip_login and user is None:
            user = ""
        elif user is None:
            # No address given: fall back to a name derived from the OS user.
            user = find_user_home_path()
        self.user, self.useralias = make_addr_alias_user(user)
        if soft_email_validation:
            # Regex-only validation; no check that the mailbox exists.
            validate_email_with_regex(self.user)
        self.is_closed = None
        self.host = host
        # Default SMTP port: 465 for SSL, 587 otherwise (STARTTLS).
        self.port = str(port) if port is not None else "465" if smtp_ssl else "587"
        self.smtp_starttls = smtp_starttls
        self.ssl = smtp_ssl
        self.smtp_skip_login = smtp_skip_login
        self.debuglevel = smtp_set_debuglevel
        self.encoding = encoding
        self.kwargs = kwargs
        self.cache = {}
        self.unsent = []
        self.num_mail_sent = 0
        self.oauth2_file = oauth2_file
        self.credentials = password if oauth2_file is None else oauth2_info
        self.dkim = dkim

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.is_closed:
            self.close()
        # Returning False propagates any exception from the `with` body.
        return False

    @property
    def connection(self):
        # SMTP_SSL wraps the socket in TLS from the start; plain SMTP upgrades
        # later via STARTTLS when requested.
        return smtplib.SMTP_SSL if self.ssl else smtplib.SMTP

    @property
    def starttls(self):
        # Default: STARTTLS exactly when the connection is not already SSL.
        if self.smtp_starttls is None:
            return False if self.ssl else True
        return self.smtp_starttls

    def set_logging(self, log_level=logging.ERROR, file_path_name=None):
        """
        This function allows to change the logging backend, either output or file as backend
        It also allows to set the logging level (whether to display only critical/error/info/debug).

        for example::

            yag = yagmail.SMTP()
            yag.set_logging(yagmail.logging.DEBUG)  # to see everything

        and::

            yagmail.set_logging(yagmail.logging.DEBUG, 'somelocalfile.log')

        lastly, a log_level of :py:class:`None` will make sure there is no I/O.
        """
        self.log = get_logger(log_level, file_path_name)

    def prepare_send(
        self,
        to=None,
        subject=None,
        contents=None,
        attachments=None,
        cc=None,
        bcc=None,
        headers=None,
        prettify_html=True,
        message_id=None,
        group_messages=True,
    ):
        """Resolve recipients and build the serialized message without sending.

        Returns a ``(recipients, msg_string)`` pair as passed to ``sendmail``.
        """
        addresses = resolve_addresses(self.user, self.useralias, to, cc, bcc)
        if self.soft_email_validation:
            for email_addr in addresses["recipients"]:
                validate_email_with_regex(email_addr)
        msg = prepare_message(
            self.user,
            self.useralias,
            addresses,
            subject,
            contents,
            attachments,
            headers,
            self.encoding,
            prettify_html,
            message_id,
            group_messages,
            self.dkim,
        )
        recipients = addresses["recipients"]
        msg_strings = msg.as_string()
        return recipients, msg_strings

    def send(
        self,
        to=None,
        subject=None,
        contents=None,
        attachments=None,
        cc=None,
        bcc=None,
        preview_only=False,
        headers=None,
        prettify_html=True,
        message_id=None,
        group_messages=True,
    ):
        """ Use this to send an email with gmail"""
        self.login()
        recipients, msg_strings = self.prepare_send(
            to,
            subject,
            contents,
            attachments,
            cc,
            bcc,
            headers,
            prettify_html,
            message_id,
            group_messages,
        )
        if preview_only:
            return recipients, msg_strings
        return self._attempt_send(recipients, msg_strings)

    def _attempt_send(self, recipients, msg_strings):
        """Try delivery up to three times; queue in self.unsent on failure."""
        attempts = 0
        while attempts < 3:
            try:
                result = self.smtp.sendmail(self.user, recipients, msg_strings)
                self.log.info("Message sent to %s", recipients)
                self.num_mail_sent += 1
                return result
            except smtplib.SMTPServerDisconnected as e:
                self.log.error(e)
                attempts += 1
                # Linear backoff: 3s, then 6s.
                time.sleep(attempts * 3)
        self.unsent.append((recipients, msg_strings))
        return False

    def send_unsent(self):
        """
        Emails that could not be sent will be stored in :attr:`self.unsent`.
        Use this function to attempt to send them again
        """
        # Bug fix: the previous implementation popped items while iterating
        # `range(len(self.unsent))`, which skipped entries and raised
        # IndexError once more than one message was queued. Take a snapshot
        # instead; failed messages are re-queued by _attempt_send itself.
        pending, self.unsent = self.unsent, []
        for recipients, msg_strings in pending:
            self._attempt_send(recipients, msg_strings)

    def close(self):
        """ Close the connection to the SMTP server """
        self.is_closed = True
        try:
            self.smtp.quit()
        except (TypeError, AttributeError, smtplib.SMTPServerDisconnected):
            # Never connected, already gone, or mocked out — nothing to do.
            pass

    def login(self):
        """Authenticate, via OAuth2 when an oauth2_file was configured."""
        if self.oauth2_file is not None:
            self._login_oauth2(self.credentials)
        else:
            self._login(self.credentials)

    def _login(self, password):
        """
        Login to the SMTP server using password. `login` only needs to be manually run when the
        connection to the SMTP server was closed by the user.
        """
        self.smtp = self.connection(self.host, self.port, **self.kwargs)
        self.smtp.set_debuglevel(self.debuglevel)
        if self.starttls:
            self.smtp.ehlo()
            # starttls may be True or a dict of kwargs for smtplib.starttls().
            if self.starttls is True:
                self.smtp.starttls()
            else:
                self.smtp.starttls(**self.starttls)
            self.smtp.ehlo()
        self.is_closed = False
        if not self.smtp_skip_login:
            password = self.handle_password(self.user, password)
            self.smtp.login(self.user, password)
        self.log.info("Connected to SMTP @ %s:%s as %s", self.host, self.port, self.user)

    @staticmethod
    def handle_password(user, password):
        # Delegates to the module-level handle_password helper.
        return handle_password(user, password)

    @staticmethod
    def get_oauth_string(user, oauth2_info):
        # Delegates to the module-level get_oauth_string helper.
        return get_oauth_string(user, oauth2_info)

    def _login_oauth2(self, oauth2_info):
        """Authenticate with the XOAUTH2 SASL mechanism."""
        if "email_address" in oauth2_info:
            oauth2_info.pop("email_address")
        self.smtp = self.connection(self.host, self.port, **self.kwargs)
        try:
            self.smtp.set_debuglevel(self.debuglevel)
        except AttributeError:
            pass
        auth_string = self.get_oauth_string(self.user, oauth2_info)
        self.smtp.ehlo(oauth2_info["google_client_id"])
        if self.starttls is True:
            self.smtp.starttls()
        self.smtp.docmd("AUTH", "XOAUTH2 " + auth_string)

    def feedback(self, message="Awesome features! You made my day! How can I contribute?"):
        """ Most important function. Please send me feedback :-) """
        self.send("[email protected]", "Yagmail feedback", message)

    def __del__(self):
        try:
            if not self.is_closed:
                self.close()
        except AttributeError:
            # __init__ may have failed before is_closed was set.
            pass
|
(user=None, password=None, host='smtp.gmail.com', port=None, smtp_starttls=None, smtp_ssl=True, smtp_set_debuglevel=0, smtp_skip_login=False, encoding='utf-8', oauth2_file=None, soft_email_validation=True, dkim=None, **kwargs)
|
69,595 |
yagmail.sender
|
__del__
| null |
def __del__(self):
    """Destructor: close the SMTP connection if it is still open."""
    try:
        if not self.is_closed:
            self.close()
    except AttributeError:
        # __init__ may have failed before is_closed was ever assigned;
        # destructors must not raise.
        pass
|
(self)
|
69,597 |
yagmail.sender
|
__exit__
| null |
def __exit__(self, exc_type, exc_val, exc_tb):
    """Context-manager exit: close the connection once, if not already closed."""
    if not self.is_closed:
        self.close()
    # Returning False propagates any exception raised inside the `with` body.
    return False
|
(self, exc_type, exc_val, exc_tb)
|
69,598 |
yagmail.sender
|
__init__
| null |
def __init__(
    self,
    user=None,
    password=None,
    host="smtp.gmail.com",
    port=None,
    smtp_starttls=None,
    smtp_ssl=True,
    smtp_set_debuglevel=0,
    smtp_skip_login=False,
    encoding="utf-8",
    oauth2_file=None,
    soft_email_validation=True,
    dkim=None,
    **kwargs
):
    """Configure the sender.

    Extra ``**kwargs`` are forwarded to smtplib's connection constructor.
    When `oauth2_file` is given, credentials come from that file instead of
    `password`.
    """
    self.log = get_logger()
    self.set_logging()
    self.soft_email_validation = soft_email_validation
    if oauth2_file is not None:
        oauth2_info = get_oauth2_info(oauth2_file, user)
        if user is None:
            user = oauth2_info["email_address"]
    if smtp_skip_login and user is None:
        user = ""
    elif user is None:
        # No address given: derive one from the OS user.
        user = find_user_home_path()
    self.user, self.useralias = make_addr_alias_user(user)
    if soft_email_validation:
        # Regex-only check; no verification the mailbox exists.
        validate_email_with_regex(self.user)
    self.is_closed = None
    self.host = host
    # Default SMTP port: 465 for SSL, 587 otherwise (STARTTLS).
    self.port = str(port) if port is not None else "465" if smtp_ssl else "587"
    self.smtp_starttls = smtp_starttls
    self.ssl = smtp_ssl
    self.smtp_skip_login = smtp_skip_login
    self.debuglevel = smtp_set_debuglevel
    self.encoding = encoding
    self.kwargs = kwargs
    self.cache = {}
    self.unsent = []
    self.num_mail_sent = 0
    self.oauth2_file = oauth2_file
    self.credentials = password if oauth2_file is None else oauth2_info
    self.dkim = dkim
|
(self, user=None, password=None, host='smtp.gmail.com', port=None, smtp_starttls=None, smtp_ssl=True, smtp_set_debuglevel=0, smtp_skip_login=False, encoding='utf-8', oauth2_file=None, soft_email_validation=True, dkim=None, **kwargs)
|
69,599 |
yagmail.sender
|
_attempt_send
| null |
def _attempt_send(self, recipients, msg_strings):
    """Try to deliver the message up to three times.

    Returns smtplib's sendmail result on success, or False after three
    failures (the message is then queued in self.unsent).
    """
    attempts = 0
    while attempts < 3:
        try:
            result = self.smtp.sendmail(self.user, recipients, msg_strings)
            self.log.info("Message sent to %s", recipients)
            self.num_mail_sent += 1
            return result
        except smtplib.SMTPServerDisconnected as e:
            # NOTE(review): the retry backs off (3s, then 6s) but never
            # reconnects; a dropped connection will likely fail all three
            # attempts — confirm whether self.login() should be re-run here.
            self.log.error(e)
            attempts += 1
            time.sleep(attempts * 3)
    self.unsent.append((recipients, msg_strings))
    return False
|
(self, recipients, msg_strings)
|
69,600 |
yagmail.sender
|
_login
|
Login to the SMTP server using password. `login` only needs to be manually run when the
connection to the SMTP server was closed by the user.
|
def _login(self, password):
    """
    Login to the SMTP server using password. `login` only needs to be manually run when the
    connection to the SMTP server was closed by the user.
    """
    # `self.connection` is smtplib.SMTP_SSL or smtplib.SMTP depending on self.ssl.
    self.smtp = self.connection(self.host, self.port, **self.kwargs)
    self.smtp.set_debuglevel(self.debuglevel)
    if self.starttls:
        self.smtp.ehlo()
        # smtp_starttls may be True or a dict of keyword args for starttls().
        if self.starttls is True:
            self.smtp.starttls()
        else:
            self.smtp.starttls(**self.starttls)
        self.smtp.ehlo()
    self.is_closed = False
    if not self.smtp_skip_login:
        password = self.handle_password(self.user, password)
        self.smtp.login(self.user, password)
    self.log.info("Connected to SMTP @ %s:%s as %s", self.host, self.port, self.user)
|
(self, password)
|
69,601 |
yagmail.sender
|
_login_oauth2
| null |
def _login_oauth2(self, oauth2_info):
    """Authenticate against the server using the XOAUTH2 SASL mechanism."""
    # NOTE(review): this pops from the caller's dict (self.credentials),
    # mutating shared state; harmless on repeat calls since the pop is guarded.
    if "email_address" in oauth2_info:
        oauth2_info.pop("email_address")
    self.smtp = self.connection(self.host, self.port, **self.kwargs)
    try:
        self.smtp.set_debuglevel(self.debuglevel)
    except AttributeError:
        pass
    auth_string = self.get_oauth_string(self.user, oauth2_info)
    self.smtp.ehlo(oauth2_info["google_client_id"])
    if self.starttls is True:
        self.smtp.starttls()
    self.smtp.docmd("AUTH", "XOAUTH2 " + auth_string)
|
(self, oauth2_info)
|
69,602 |
yagmail.sender
|
close
|
Close the connection to the SMTP server
|
def close(self):
    """Mark the session closed and politely QUIT the SMTP server."""
    # Flag first, so repeated close() calls (and __del__/__exit__) are no-ops.
    self.is_closed = True
    try:
        self.smtp.quit()
    except (AttributeError, TypeError, smtplib.SMTPServerDisconnected):
        # Never connected, mocked out, or the server already hung up.
        pass
|
(self)
|
69,603 |
yagmail.sender
|
feedback
|
Most important function. Please send me feedback :-)
|
def feedback(self, message="Awesome features! You made my day! How can I contribute?"):
    """Email *message* as feedback straight to the author. :-)"""
    self.send("[email protected]", "Yagmail feedback", message)
|
(self, message='Awesome features! You made my day! How can I contribute?')
|
69,604 |
yagmail.sender
|
get_oauth_string
| null |
@staticmethod
def get_oauth_string(user, oauth2_info):
    # Delegates to the module-level get_oauth_string helper (the name here
    # resolves to the module global, not to this staticmethod).
    return get_oauth_string(user, oauth2_info)
|
(user, oauth2_info)
|
69,605 |
yagmail.sender
|
handle_password
| null |
@staticmethod
def handle_password(user, password):
    # Delegates to the module-level handle_password helper (the name here
    # resolves to the module global, not to this staticmethod).
    return handle_password(user, password)
|
(user, password)
|
69,606 |
yagmail.sender
|
login
| null |
def login(self):
    """Authenticate against the server, via OAuth2 when configured."""
    use_oauth = self.oauth2_file is not None
    if use_oauth:
        self._login_oauth2(self.credentials)
    else:
        self._login(self.credentials)
|
(self)
|
69,607 |
yagmail.sender
|
prepare_send
| null |
def prepare_send(
    self,
    to=None,
    subject=None,
    contents=None,
    attachments=None,
    cc=None,
    bcc=None,
    headers=None,
    prettify_html=True,
    message_id=None,
    group_messages=True,
):
    """Resolve recipients and build the serialized message without sending.

    Returns a ``(recipients, msg_string)`` pair — the same arguments
    :meth:`send` ultimately passes to ``smtplib.sendmail``.
    """
    # Expand to/cc/bcc (strings, lists or alias dicts) into one mapping.
    addresses = resolve_addresses(self.user, self.useralias, to, cc, bcc)
    if self.soft_email_validation:
        # Regex-only validation; no check that the mailboxes exist.
        for email_addr in addresses["recipients"]:
            validate_email_with_regex(email_addr)
    msg = prepare_message(
        self.user,
        self.useralias,
        addresses,
        subject,
        contents,
        attachments,
        headers,
        self.encoding,
        prettify_html,
        message_id,
        group_messages,
        self.dkim,
    )
    recipients = addresses["recipients"]
    msg_strings = msg.as_string()
    return recipients, msg_strings
|
(self, to=None, subject=None, contents=None, attachments=None, cc=None, bcc=None, headers=None, prettify_html=True, message_id=None, group_messages=True)
|
69,608 |
yagmail.sender
|
send
|
Use this to send an email with gmail
|
def send(
    self,
    to=None,
    subject=None,
    contents=None,
    attachments=None,
    cc=None,
    bcc=None,
    preview_only=False,
    headers=None,
    prettify_html=True,
    message_id=None,
    group_messages=True,
):
    """Send an email (gmail by default).

    Returns the sendmail result, or the ``(recipients, msg_string)`` pair
    without sending when *preview_only* is set.
    """
    self.login()
    prepared = self.prepare_send(
        to,
        subject,
        contents,
        attachments,
        cc,
        bcc,
        headers,
        prettify_html,
        message_id,
        group_messages,
    )
    if preview_only:
        return prepared
    recipients, msg_strings = prepared
    return self._attempt_send(recipients, msg_strings)
|
(self, to=None, subject=None, contents=None, attachments=None, cc=None, bcc=None, preview_only=False, headers=None, prettify_html=True, message_id=None, group_messages=True)
|
69,609 |
yagmail.sender
|
send_unsent
|
Emails that could not be sent will be stored in :attr:`self.unsent`.
Use this function to attempt to send them again
|
def send_unsent(self):
    """
    Emails that could not be sent will be stored in :attr:`self.unsent`.
    Use this function to attempt to send them again
    """
    # Bug fix: the previous implementation popped items while iterating
    # `range(len(self.unsent))`, which skipped entries and raised IndexError
    # as soon as more than one message was queued. Work on a snapshot
    # instead; messages that fail again are re-queued by _attempt_send.
    pending, self.unsent = self.unsent, []
    for recipients, msg_strings in pending:
        self._attempt_send(recipients, msg_strings)
|
(self)
|
69,610 |
yagmail.sender
|
set_logging
|
This function allows changing the logging backend, using either output or a file as the backend.
It also allows setting the logging level (whether to display only critical/error/info/debug).
for example::
yag = yagmail.SMTP()
yag.set_logging(yagmail.logging.DEBUG) # to see everything
and::
yagmail.set_logging(yagmail.logging.DEBUG, 'somelocalfile.log')
lastly, a log_level of :py:class:`None` will make sure there is no I/O.
|
def set_logging(self, log_level=logging.ERROR, file_path_name=None):
    """
    This function allows to change the logging backend, either output or file as backend
    It also allows to set the logging level (whether to display only critical/error/info/debug).

    for example::

        yag = yagmail.SMTP()
        yag.set_logging(yagmail.logging.DEBUG)  # to see everything

    and::

        yagmail.set_logging(yagmail.logging.DEBUG, 'somelocalfile.log')

    lastly, a log_level of :py:class:`None` will make sure there is no I/O.
    """
    # Replaces self.log wholesale; get_logger(None, ...) installs a no-op logger.
    self.log = get_logger(log_level, file_path_name)
|
(self, log_level=40, file_path_name=None)
|
69,611 |
yagmail.error
|
YagAddressError
|
This means that the address was given in an invalid format.
Note that From can either be a string, or a dictionary where the key is an email,
and the value is an alias {'[email protected]', 'Sam'}. In the case of 'to',
it can either be a string (email), a list of emails (email addresses without aliases)
or a dictionary where keys are the email addresses and the values indicate the aliases.
Furthermore, it does not do any validation of whether an email exists.
|
class YagAddressError(Exception):
    """Raised when an address was supplied in an invalid format.

    ``From`` may be a string or a dict mapping an email address to an alias
    (e.g. ``{'[email protected]': 'Sam'}``). ``to`` may be a string (one email),
    a list of emails (no aliases), or a dict mapping email addresses to
    aliases. No check is made that an address actually exists.
    """
| null |
69,612 |
yagmail.error
|
YagConnectionClosed
|
The connection object has been closed by the user.
This object can be used to send emails again after logging in,
using self.login().
|
class YagConnectionClosed(Exception):
    """Raised after the user has closed the SMTP connection.

    The object can still send emails again after re-authenticating with
    ``self.login()``.
    """
| null |
69,617 |
yagmail.utils
|
inline
|
Only needed when wanting to inline an image rather than attach it
|
class inline(str):
    """Marker ``str`` subclass: inline an image rather than attach it."""
| null |
69,623 |
yagmail.utils
|
raw
|
Ensure that a string is treated as text and will not receive 'magic'.
|
class raw(str):
    """Marker ``str`` subclass: treat the text verbatim, with no 'magic'."""
| null |
69,624 |
yagmail.password
|
register
|
Use this to add a new gmail account to your OS' keyring so it can be used in yagmail
|
def register(username, password):
    """ Use this to add a new gmail account to your OS' keyring so it can be used in yagmail """
    # Stores the secret under service name "yagmail" via the `keyring` package;
    # SMTP can later fetch it without the password appearing in code.
    keyring.set_password("yagmail", username, password)
|
(username, password)
|
69,628 |
cachecontrol.wrapper
|
CacheControl
| null |
def CacheControl(
    sess: requests.Session,
    cache: BaseCache | None = None,
    cache_etags: bool = True,
    serializer: Serializer | None = None,
    heuristic: BaseHeuristic | None = None,
    controller_class: type[CacheController] | None = None,
    adapter_class: type[CacheControlAdapter] | None = None,
    cacheable_methods: Collection[str] | None = None,
) -> requests.Session:
    """Mount a caching adapter on *sess* for http and https, and return it."""
    if cache is None:
        # Default to an in-memory dict-backed cache.
        cache = DictCache()
    if adapter_class is None:
        adapter_class = CacheControlAdapter
    adapter = adapter_class(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
        controller_class=controller_class,
        cacheable_methods=cacheable_methods,
    )
    for prefix in ("http://", "https://"):
        sess.mount(prefix, adapter)
    return sess
|
(sess: 'requests.Session', cache: 'BaseCache | None' = None, cache_etags: 'bool' = True, serializer: 'Serializer | None' = None, heuristic: 'BaseHeuristic | None' = None, controller_class: 'type[CacheController] | None' = None, adapter_class: 'type[CacheControlAdapter] | None' = None, cacheable_methods: 'Collection[str] | None' = None) -> 'requests.Session'
|
69,629 |
cachecontrol.adapter
|
CacheControlAdapter
| null |
class CacheControlAdapter(HTTPAdapter):
invalidating_methods = {"PUT", "PATCH", "DELETE"}
def __init__(
self,
cache: BaseCache | None = None,
cache_etags: bool = True,
controller_class: type[CacheController] | None = None,
serializer: Serializer | None = None,
heuristic: BaseHeuristic | None = None,
cacheable_methods: Collection[str] | None = None,
*args: Any,
**kw: Any,
) -> None:
super().__init__(*args, **kw)
self.cache = DictCache() if cache is None else cache
self.heuristic = heuristic
self.cacheable_methods = cacheable_methods or ("GET",)
controller_factory = controller_class or CacheController
self.controller = controller_factory(
self.cache, cache_etags=cache_etags, serializer=serializer
)
def send(
self,
request: PreparedRequest,
stream: bool = False,
timeout: None | float | tuple[float, float] | tuple[float, None] = None,
verify: bool | str = True,
cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None,
proxies: Mapping[str, str] | None = None,
cacheable_methods: Collection[str] | None = None,
) -> Response:
"""
Send a request. Use the request information to see if it
exists in the cache and cache the response if we need to and can.
"""
cacheable = cacheable_methods or self.cacheable_methods
if request.method in cacheable:
try:
cached_response = self.controller.cached_request(request)
except zlib.error:
cached_response = None
if cached_response:
return self.build_response(request, cached_response, from_cache=True)
# check for etags and add headers if appropriate
request.headers.update(self.controller.conditional_headers(request))
resp = super().send(request, stream, timeout, verify, cert, proxies)
return resp
def build_response(
self,
request: PreparedRequest,
response: HTTPResponse,
from_cache: bool = False,
cacheable_methods: Collection[str] | None = None,
) -> Response:
"""
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
"""
cacheable = cacheable_methods or self.cacheable_methods
if not from_cache and request.method in cacheable:
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response)
# apply any expiration heuristics
if response.status == 304:
# We must have sent an ETag request. This could mean
# that we've been expired already or that we simply
# have an etag. In either case, we want to try and
# update the cache if that is the case.
cached_response = self.controller.update_cached_response(
request, response
)
if cached_response is not response:
from_cache = True
# We are done with the server response, read a
# possible response body (compliant servers will
# not return one, but we cannot be 100% sure) and
# release the connection back to the pool.
response.read(decode_content=False)
response.release_conn()
response = cached_response
# We always cache the 301 responses
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
self.controller.cache_response(request, response)
else:
# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper( # type: ignore[assignment]
response._fp, # type: ignore[arg-type]
functools.partial(
self.controller.cache_response, request, response
),
)
if response.chunked:
super_update_chunk_length = response._update_chunk_length
def _update_chunk_length(self: HTTPResponse) -> None:
super_update_chunk_length()
if self.chunk_left == 0:
self._fp._close() # type: ignore[union-attr]
response._update_chunk_length = types.MethodType( # type: ignore[method-assign]
_update_chunk_length, response
)
resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call]
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:
assert request.url is not None
cache_url = self.controller.cache_url(request.url)
self.cache.delete(cache_url)
# Give the request a from_cache attr to let people use it
resp.from_cache = from_cache # type: ignore[attr-defined]
return resp
def close(self) -> None:
self.cache.close()
super().close() # type: ignore[no-untyped-call]
|
(cache: 'BaseCache | None' = None, cache_etags: 'bool' = True, controller_class: 'type[CacheController] | None' = None, serializer: 'Serializer | None' = None, heuristic: 'BaseHeuristic | None' = None, cacheable_methods: 'Collection[str] | None' = None, *args: 'Any', **kw: 'Any') -> 'None'
|
69,631 |
cachecontrol.adapter
|
__init__
| null |
def __init__(
self,
cache: BaseCache | None = None,
cache_etags: bool = True,
controller_class: type[CacheController] | None = None,
serializer: Serializer | None = None,
heuristic: BaseHeuristic | None = None,
cacheable_methods: Collection[str] | None = None,
*args: Any,
**kw: Any,
) -> None:
super().__init__(*args, **kw)
self.cache = DictCache() if cache is None else cache
self.heuristic = heuristic
self.cacheable_methods = cacheable_methods or ("GET",)
controller_factory = controller_class or CacheController
self.controller = controller_factory(
self.cache, cache_etags=cache_etags, serializer=serializer
)
|
(self, cache: 'BaseCache | None' = None, cache_etags: 'bool' = True, controller_class: 'type[CacheController] | None' = None, serializer: 'Serializer | None' = None, heuristic: 'BaseHeuristic | None' = None, cacheable_methods: 'Collection[str] | None' = None, *args: 'Any', **kw: 'Any') -> 'None'
|
69,634 |
cachecontrol.adapter
|
build_response
|
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
|
def build_response(
self,
request: PreparedRequest,
response: HTTPResponse,
from_cache: bool = False,
cacheable_methods: Collection[str] | None = None,
) -> Response:
"""
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
"""
cacheable = cacheable_methods or self.cacheable_methods
if not from_cache and request.method in cacheable:
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response)
# apply any expiration heuristics
if response.status == 304:
# We must have sent an ETag request. This could mean
# that we've been expired already or that we simply
# have an etag. In either case, we want to try and
# update the cache if that is the case.
cached_response = self.controller.update_cached_response(
request, response
)
if cached_response is not response:
from_cache = True
# We are done with the server response, read a
# possible response body (compliant servers will
# not return one, but we cannot be 100% sure) and
# release the connection back to the pool.
response.read(decode_content=False)
response.release_conn()
response = cached_response
# We always cache the 301 responses
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
self.controller.cache_response(request, response)
else:
# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper( # type: ignore[assignment]
response._fp, # type: ignore[arg-type]
functools.partial(
self.controller.cache_response, request, response
),
)
if response.chunked:
super_update_chunk_length = response._update_chunk_length
def _update_chunk_length(self: HTTPResponse) -> None:
super_update_chunk_length()
if self.chunk_left == 0:
self._fp._close() # type: ignore[union-attr]
response._update_chunk_length = types.MethodType( # type: ignore[method-assign]
_update_chunk_length, response
)
resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call]
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:
assert request.url is not None
cache_url = self.controller.cache_url(request.url)
self.cache.delete(cache_url)
# Give the request a from_cache attr to let people use it
resp.from_cache = from_cache # type: ignore[attr-defined]
return resp
|
(self, request: 'PreparedRequest', response: 'HTTPResponse', from_cache: 'bool' = False, cacheable_methods: 'Collection[str] | None' = None) -> 'Response'
|
69,636 |
cachecontrol.adapter
|
close
| null |
def close(self) -> None:
self.cache.close()
super().close() # type: ignore[no-untyped-call]
|
(self) -> NoneType
|
69,642 |
cachecontrol.adapter
|
send
|
Send a request. Use the request information to see if it
exists in the cache and cache the response if we need to and can.
|
def send(
self,
request: PreparedRequest,
stream: bool = False,
timeout: None | float | tuple[float, float] | tuple[float, None] = None,
verify: bool | str = True,
cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None,
proxies: Mapping[str, str] | None = None,
cacheable_methods: Collection[str] | None = None,
) -> Response:
"""
Send a request. Use the request information to see if it
exists in the cache and cache the response if we need to and can.
"""
cacheable = cacheable_methods or self.cacheable_methods
if request.method in cacheable:
try:
cached_response = self.controller.cached_request(request)
except zlib.error:
cached_response = None
if cached_response:
return self.build_response(request, cached_response, from_cache=True)
# check for etags and add headers if appropriate
request.headers.update(self.controller.conditional_headers(request))
resp = super().send(request, stream, timeout, verify, cert, proxies)
return resp
|
(self, request: 'PreparedRequest', stream: 'bool' = False, timeout: 'None | float | tuple[float, float] | tuple[float, None]' = None, verify: 'bool | str' = True, cert: 'None | bytes | str | tuple[bytes | str, bytes | str]' = None, proxies: 'Mapping[str, str] | None' = None, cacheable_methods: 'Collection[str] | None' = None) -> 'Response'
|
69,643 |
cachecontrol.controller
|
CacheController
|
An interface to see if request should cached or not.
|
class CacheController:
"""An interface to see if request should cached or not."""
def __init__(
self,
cache: BaseCache | None = None,
cache_etags: bool = True,
serializer: Serializer | None = None,
status_codes: Collection[int] | None = None,
):
self.cache = DictCache() if cache is None else cache
self.cache_etags = cache_etags
self.serializer = serializer or Serializer()
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
@classmethod
def _urlnorm(cls, uri: str) -> str:
"""Normalize the URL to create a safe key for the cache"""
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
scheme = scheme.lower()
authority = authority.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
defrag_uri = scheme + "://" + authority + request_uri
return defrag_uri
@classmethod
def cache_url(cls, uri: str) -> str:
return cls._urlnorm(uri)
def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
known_directives = {
# https://tools.ietf.org/html/rfc7234#section-5.2
"max-age": (int, True),
"max-stale": (int, False),
"min-fresh": (int, True),
"no-cache": (None, False),
"no-store": (None, False),
"no-transform": (None, False),
"only-if-cached": (None, False),
"must-revalidate": (None, False),
"public": (None, False),
"private": (None, False),
"proxy-revalidate": (None, False),
"s-maxage": (int, True),
}
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
retval: dict[str, int | None] = {}
for cc_directive in cc_headers.split(","):
if not cc_directive.strip():
continue
parts = cc_directive.split("=", 1)
directive = parts[0].strip()
try:
typ, required = known_directives[directive]
except KeyError:
logger.debug("Ignoring unknown cache-control directive: %s", directive)
continue
if not typ or not required:
retval[directive] = None
if typ:
try:
retval[directive] = typ(parts[1].strip())
except IndexError:
if required:
logger.debug(
"Missing value for cache-control " "directive: %s",
directive,
)
except ValueError:
logger.debug(
"Invalid value for cache-control directive " "%s, must be %s",
directive,
typ.__name__,
)
return retval
def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
"""
Load a cached response, or return None if it's not available.
"""
# We do not support caching of partial content: so if the request contains a
# Range header then we don't want to load anything from the cache.
if "Range" in request.headers:
return None
cache_url = request.url
assert cache_url is not None
cache_data = self.cache.get(cache_url)
if cache_data is None:
logger.debug("No cache entry available")
return None
if isinstance(self.cache, SeparateBodyBaseCache):
body_file = self.cache.get_body(cache_url)
else:
body_file = None
result = self.serializer.loads(request, cache_data, body_file)
if result is None:
logger.warning("Cache entry deserialization failed, entry ignored")
return result
def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]:
"""
Return a cached response if it exists in the cache, otherwise
return False.
"""
assert request.url is not None
cache_url = self.cache_url(request.url)
logger.debug('Looking up "%s" in the cache', cache_url)
cc = self.parse_cache_control(request.headers)
# Bail out if the request insists on fresh data
if "no-cache" in cc:
logger.debug('Request header has "no-cache", cache bypassed')
return False
if "max-age" in cc and cc["max-age"] == 0:
logger.debug('Request header has "max_age" as 0, cache bypassed')
return False
# Check whether we can load the response from the cache:
resp = self._load_from_cache(request)
if not resp:
return False
# If we have a cached permanent redirect, return it immediately. We
# don't need to test our response for other headers b/c it is
# intrinsically "cacheable" as it is Permanent.
#
# See:
# https://tools.ietf.org/html/rfc7231#section-6.4.2
#
# Client can try to refresh the value by repeating the request
# with cache busting headers as usual (ie no-cache).
if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
msg = (
"Returning cached permanent redirect response "
"(ignoring date and etag information)"
)
logger.debug(msg)
return resp
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
if not headers or "date" not in headers:
if "etag" not in headers:
# Without date or etag, the cached response can never be used
# and should be deleted.
logger.debug("Purging cached response: no date or etag")
self.cache.delete(cache_url)
logger.debug("Ignoring cached response: no date")
return False
now = time.time()
time_tuple = parsedate_tz(headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
current_age = max(0, now - date)
logger.debug("Current age based on date: %i", current_age)
# TODO: There is an assumption that the result will be a
# urllib3 response object. This may not be best since we
# could probably avoid instantiating or constructing the
# response until we know we need it.
resp_cc = self.parse_cache_control(headers)
# determine freshness
freshness_lifetime = 0
# Check the max-age pragma in the cache control header
max_age = resp_cc.get("max-age")
if max_age is not None:
freshness_lifetime = max_age
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
# If there isn't a max-age, check for an expires header
elif "expires" in headers:
expires = parsedate_tz(headers["expires"])
if expires is not None:
expire_time = calendar.timegm(expires[:6]) - date
freshness_lifetime = max(0, expire_time)
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
# Determine if we are setting freshness limit in the
# request. Note, this overrides what was in the response.
max_age = cc.get("max-age")
if max_age is not None:
freshness_lifetime = max_age
logger.debug(
"Freshness lifetime from request max-age: %i", freshness_lifetime
)
min_fresh = cc.get("min-fresh")
if min_fresh is not None:
# adjust our current age by our min fresh
current_age += min_fresh
logger.debug("Adjusted current age from min-fresh: %i", current_age)
# Return entry if it is fresh enough
if freshness_lifetime > current_age:
logger.debug('The response is "fresh", returning cached response')
logger.debug("%i > %i", freshness_lifetime, current_age)
return resp
# we're not fresh. If we don't have an Etag, clear it out
if "etag" not in headers:
logger.debug('The cached response is "stale" with no etag, purging')
self.cache.delete(cache_url)
# return the original handler
return False
def conditional_headers(self, request: PreparedRequest) -> dict[str, str]:
resp = self._load_from_cache(request)
new_headers = {}
if resp:
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
if "etag" in headers:
new_headers["If-None-Match"] = headers["ETag"]
if "last-modified" in headers:
new_headers["If-Modified-Since"] = headers["Last-Modified"]
return new_headers
def _cache_set(
self,
cache_url: str,
request: PreparedRequest,
response: HTTPResponse,
body: bytes | None = None,
expires_time: int | None = None,
) -> None:
"""
Store the data in the cache.
"""
if isinstance(self.cache, SeparateBodyBaseCache):
# We pass in the body separately; just put a placeholder empty
# string in the metadata.
self.cache.set(
cache_url,
self.serializer.dumps(request, response, b""),
expires=expires_time,
)
# body is None can happen when, for example, we're only updating
# headers, as is the case in update_cached_response().
if body is not None:
self.cache.set_body(cache_url, body)
else:
self.cache.set(
cache_url,
self.serializer.dumps(request, response, body),
expires=expires_time,
)
def cache_response(
self,
request: PreparedRequest,
response: HTTPResponse,
body: bytes | None = None,
status_codes: Collection[int] | None = None,
) -> None:
"""
Algorithm for caching requests.
This assumes a requests Response object.
"""
# From httplib2: Don't cache 206's since we aren't going to
# handle byte range requests
cacheable_status_codes = status_codes or self.cacheable_status_codes
if response.status not in cacheable_status_codes:
logger.debug(
"Status code %s not in %s", response.status, cacheable_status_codes
)
return
response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
response.headers
)
if "date" in response_headers:
time_tuple = parsedate_tz(response_headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
else:
date = 0
# If we've been given a body, our response has a Content-Length, that
# Content-Length is valid then we can check to see if the body we've
# been given matches the expected size, and if it doesn't we'll just
# skip trying to cache it.
if (
body is not None
and "content-length" in response_headers
and response_headers["content-length"].isdigit()
and int(response_headers["content-length"]) != len(body)
):
return
cc_req = self.parse_cache_control(request.headers)
cc = self.parse_cache_control(response_headers)
assert request.url is not None
cache_url = self.cache_url(request.url)
logger.debug('Updating cache with response from "%s"', cache_url)
# Delete it from the cache if we happen to have it stored there
no_store = False
if "no-store" in cc:
no_store = True
logger.debug('Response header has "no-store"')
if "no-store" in cc_req:
no_store = True
logger.debug('Request header has "no-store"')
if no_store and self.cache.get(cache_url):
logger.debug('Purging existing cache entry to honor "no-store"')
self.cache.delete(cache_url)
if no_store:
return
# https://tools.ietf.org/html/rfc7234#section-4.1:
# A Vary header field-value of "*" always fails to match.
# Storing such a response leads to a deserialization warning
# during cache lookup and is not allowed to ever be served,
# so storing it can be avoided.
if "*" in response_headers.get("vary", ""):
logger.debug('Response header has "Vary: *"')
return
# If we've been given an etag, then keep the response
if self.cache_etags and "etag" in response_headers:
expires_time = 0
if response_headers.get("expires"):
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires[:6]) - date
expires_time = max(expires_time, 14 * 86400)
logger.debug(f"etag object cached for {expires_time} seconds")
logger.debug("Caching due to etag")
self._cache_set(cache_url, request, response, body, expires_time)
# Add to the cache any permanent redirects. We do this before looking
# that the Date headers.
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
logger.debug("Caching permanent redirect")
self._cache_set(cache_url, request, response, b"")
# Add to the cache if the response headers demand it. If there
# is no date header then we can't do anything about expiring
# the cache.
elif "date" in response_headers:
time_tuple = parsedate_tz(response_headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
# cache when there is a max-age > 0
max_age = cc.get("max-age")
if max_age is not None and max_age > 0:
logger.debug("Caching b/c date exists and max-age > 0")
expires_time = max_age
self._cache_set(
cache_url,
request,
response,
body,
expires_time,
)
# If the request can expire, it means we should cache it
# in the meantime.
elif "expires" in response_headers:
if response_headers["expires"]:
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires[:6]) - date
else:
expires_time = None
logger.debug(
"Caching b/c of expires header. expires in {} seconds".format(
expires_time
)
)
self._cache_set(
cache_url,
request,
response,
body,
expires_time,
)
def update_cached_response(
self, request: PreparedRequest, response: HTTPResponse
) -> HTTPResponse:
"""On a 304 we will get a new set of headers that we want to
update our cached value with, assuming we have one.
This should only ever be called when we've sent an ETag and
gotten a 304 as the response.
"""
assert request.url is not None
cache_url = self.cache_url(request.url)
cached_response = self._load_from_cache(request)
if not cached_response:
# we didn't have a cached response
return response
# Lets update our headers with the headers from the new request:
# http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
#
# The server isn't supposed to send headers that would make
# the cached body invalid. But... just in case, we'll be sure
# to strip out ones we know that might be problmatic due to
# typical assumptions.
excluded_headers = ["content-length"]
cached_response.headers.update(
{
k: v
for k, v in response.headers.items()
if k.lower() not in excluded_headers
}
)
# we want a 200 b/c we have content via the cache
cached_response.status = 200
# update our cache
self._cache_set(cache_url, request, cached_response)
return cached_response
|
(cache: 'BaseCache | None' = None, cache_etags: 'bool' = True, serializer: 'Serializer | None' = None, status_codes: 'Collection[int] | None' = None)
|
69,644 |
cachecontrol.controller
|
__init__
| null |
def __init__(
self,
cache: BaseCache | None = None,
cache_etags: bool = True,
serializer: Serializer | None = None,
status_codes: Collection[int] | None = None,
):
self.cache = DictCache() if cache is None else cache
self.cache_etags = cache_etags
self.serializer = serializer or Serializer()
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
|
(self, cache: 'BaseCache | None' = None, cache_etags: 'bool' = True, serializer: 'Serializer | None' = None, status_codes: 'Collection[int] | None' = None)
|
69,645 |
cachecontrol.controller
|
_cache_set
|
Store the data in the cache.
|
def _cache_set(
self,
cache_url: str,
request: PreparedRequest,
response: HTTPResponse,
body: bytes | None = None,
expires_time: int | None = None,
) -> None:
"""
Store the data in the cache.
"""
if isinstance(self.cache, SeparateBodyBaseCache):
# We pass in the body separately; just put a placeholder empty
# string in the metadata.
self.cache.set(
cache_url,
self.serializer.dumps(request, response, b""),
expires=expires_time,
)
# body is None can happen when, for example, we're only updating
# headers, as is the case in update_cached_response().
if body is not None:
self.cache.set_body(cache_url, body)
else:
self.cache.set(
cache_url,
self.serializer.dumps(request, response, body),
expires=expires_time,
)
|
(self, cache_url: 'str', request: 'PreparedRequest', response: 'HTTPResponse', body: 'bytes | None' = None, expires_time: 'int | None' = None) -> 'None'
|
69,646 |
cachecontrol.controller
|
_load_from_cache
|
Load a cached response, or return None if it's not available.
|
def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
"""
Load a cached response, or return None if it's not available.
"""
# We do not support caching of partial content: so if the request contains a
# Range header then we don't want to load anything from the cache.
if "Range" in request.headers:
return None
cache_url = request.url
assert cache_url is not None
cache_data = self.cache.get(cache_url)
if cache_data is None:
logger.debug("No cache entry available")
return None
if isinstance(self.cache, SeparateBodyBaseCache):
body_file = self.cache.get_body(cache_url)
else:
body_file = None
result = self.serializer.loads(request, cache_data, body_file)
if result is None:
logger.warning("Cache entry deserialization failed, entry ignored")
return result
|
(self, request: 'PreparedRequest') -> 'HTTPResponse | None'
|
69,647 |
cachecontrol.controller
|
cache_response
|
Algorithm for caching requests.
This assumes a requests Response object.
|
def cache_response(
self,
request: PreparedRequest,
response: HTTPResponse,
body: bytes | None = None,
status_codes: Collection[int] | None = None,
) -> None:
"""
Algorithm for caching requests.
This assumes a requests Response object.
"""
# From httplib2: Don't cache 206's since we aren't going to
# handle byte range requests
cacheable_status_codes = status_codes or self.cacheable_status_codes
if response.status not in cacheable_status_codes:
logger.debug(
"Status code %s not in %s", response.status, cacheable_status_codes
)
return
response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
response.headers
)
if "date" in response_headers:
time_tuple = parsedate_tz(response_headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
else:
date = 0
# If we've been given a body, our response has a Content-Length, that
# Content-Length is valid then we can check to see if the body we've
# been given matches the expected size, and if it doesn't we'll just
# skip trying to cache it.
if (
body is not None
and "content-length" in response_headers
and response_headers["content-length"].isdigit()
and int(response_headers["content-length"]) != len(body)
):
return
cc_req = self.parse_cache_control(request.headers)
cc = self.parse_cache_control(response_headers)
assert request.url is not None
cache_url = self.cache_url(request.url)
logger.debug('Updating cache with response from "%s"', cache_url)
# Delete it from the cache if we happen to have it stored there
no_store = False
if "no-store" in cc:
no_store = True
logger.debug('Response header has "no-store"')
if "no-store" in cc_req:
no_store = True
logger.debug('Request header has "no-store"')
if no_store and self.cache.get(cache_url):
logger.debug('Purging existing cache entry to honor "no-store"')
self.cache.delete(cache_url)
if no_store:
return
# https://tools.ietf.org/html/rfc7234#section-4.1:
# A Vary header field-value of "*" always fails to match.
# Storing such a response leads to a deserialization warning
# during cache lookup and is not allowed to ever be served,
# so storing it can be avoided.
if "*" in response_headers.get("vary", ""):
logger.debug('Response header has "Vary: *"')
return
# If we've been given an etag, then keep the response
if self.cache_etags and "etag" in response_headers:
expires_time = 0
if response_headers.get("expires"):
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires[:6]) - date
expires_time = max(expires_time, 14 * 86400)
logger.debug(f"etag object cached for {expires_time} seconds")
logger.debug("Caching due to etag")
self._cache_set(cache_url, request, response, body, expires_time)
# Add to the cache any permanent redirects. We do this before looking
# that the Date headers.
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
logger.debug("Caching permanent redirect")
self._cache_set(cache_url, request, response, b"")
# Add to the cache if the response headers demand it. If there
# is no date header then we can't do anything about expiring
# the cache.
elif "date" in response_headers:
time_tuple = parsedate_tz(response_headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
# cache when there is a max-age > 0
max_age = cc.get("max-age")
if max_age is not None and max_age > 0:
logger.debug("Caching b/c date exists and max-age > 0")
expires_time = max_age
self._cache_set(
cache_url,
request,
response,
body,
expires_time,
)
# If the request can expire, it means we should cache it
# in the meantime.
elif "expires" in response_headers:
if response_headers["expires"]:
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires[:6]) - date
else:
expires_time = None
logger.debug(
"Caching b/c of expires header. expires in {} seconds".format(
expires_time
)
)
self._cache_set(
cache_url,
request,
response,
body,
expires_time,
)
|
(self, request: 'PreparedRequest', response: 'HTTPResponse', body: 'bytes | None' = None, status_codes: 'Collection[int] | None' = None) -> 'None'
|
69,648 |
cachecontrol.controller
|
cached_request
|
Return a cached response if it exists in the cache, otherwise
return False.
|
def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]:
"""
Return a cached response if it exists in the cache, otherwise
return False.
"""
assert request.url is not None
cache_url = self.cache_url(request.url)
logger.debug('Looking up "%s" in the cache', cache_url)
cc = self.parse_cache_control(request.headers)
# Bail out if the request insists on fresh data
if "no-cache" in cc:
logger.debug('Request header has "no-cache", cache bypassed')
return False
if "max-age" in cc and cc["max-age"] == 0:
logger.debug('Request header has "max_age" as 0, cache bypassed')
return False
# Check whether we can load the response from the cache:
resp = self._load_from_cache(request)
if not resp:
return False
# If we have a cached permanent redirect, return it immediately. We
# don't need to test our response for other headers b/c it is
# intrinsically "cacheable" as it is Permanent.
#
# See:
# https://tools.ietf.org/html/rfc7231#section-6.4.2
#
# Client can try to refresh the value by repeating the request
# with cache busting headers as usual (ie no-cache).
if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
msg = (
"Returning cached permanent redirect response "
"(ignoring date and etag information)"
)
logger.debug(msg)
return resp
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
if not headers or "date" not in headers:
if "etag" not in headers:
# Without date or etag, the cached response can never be used
# and should be deleted.
logger.debug("Purging cached response: no date or etag")
self.cache.delete(cache_url)
logger.debug("Ignoring cached response: no date")
return False
now = time.time()
time_tuple = parsedate_tz(headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
current_age = max(0, now - date)
logger.debug("Current age based on date: %i", current_age)
# TODO: There is an assumption that the result will be a
# urllib3 response object. This may not be best since we
# could probably avoid instantiating or constructing the
# response until we know we need it.
resp_cc = self.parse_cache_control(headers)
# determine freshness
freshness_lifetime = 0
# Check the max-age pragma in the cache control header
max_age = resp_cc.get("max-age")
if max_age is not None:
freshness_lifetime = max_age
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
# If there isn't a max-age, check for an expires header
elif "expires" in headers:
expires = parsedate_tz(headers["expires"])
if expires is not None:
expire_time = calendar.timegm(expires[:6]) - date
freshness_lifetime = max(0, expire_time)
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
# Determine if we are setting freshness limit in the
# request. Note, this overrides what was in the response.
max_age = cc.get("max-age")
if max_age is not None:
freshness_lifetime = max_age
logger.debug(
"Freshness lifetime from request max-age: %i", freshness_lifetime
)
min_fresh = cc.get("min-fresh")
if min_fresh is not None:
# adjust our current age by our min fresh
current_age += min_fresh
logger.debug("Adjusted current age from min-fresh: %i", current_age)
# Return entry if it is fresh enough
if freshness_lifetime > current_age:
logger.debug('The response is "fresh", returning cached response')
logger.debug("%i > %i", freshness_lifetime, current_age)
return resp
# we're not fresh. If we don't have an Etag, clear it out
if "etag" not in headers:
logger.debug('The cached response is "stale" with no etag, purging')
self.cache.delete(cache_url)
# return the original handler
return False
|
(self, request: 'PreparedRequest') -> 'HTTPResponse | Literal[False]'
|
69,649 |
cachecontrol.controller
|
conditional_headers
| null |
def conditional_headers(self, request: PreparedRequest) -> dict[str, str]:
    """Build conditional-request headers for *request* from the cache.

    When a cached response exists for the request, its ETag and
    Last-Modified values are translated into ``If-None-Match`` and
    ``If-Modified-Since`` headers respectively. Returns an empty dict
    when nothing is cached.
    """
    conditional: dict[str, str] = {}
    cached = self._load_from_cache(request)
    if cached:
        cached_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(cached.headers)
        if "etag" in cached_headers:
            conditional["If-None-Match"] = cached_headers["ETag"]
        if "last-modified" in cached_headers:
            conditional["If-Modified-Since"] = cached_headers["Last-Modified"]
    return conditional
|
(self, request: 'PreparedRequest') -> 'dict[str, str]'
|
69,650 |
cachecontrol.controller
|
parse_cache_control
| null |
def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
    """Parse a Cache-Control header into a directive -> value mapping.

    Unknown directives are skipped; value-less directives map to None;
    integer-valued directives are converted to int. Malformed values are
    logged and otherwise ignored.
    """
    # Directive table per RFC 7234 section 5.2: (value type, value required).
    # A type of None marks a value-less directive.
    known_directives = {
        # https://tools.ietf.org/html/rfc7234#section-5.2
        "max-age": (int, True),
        "max-stale": (int, False),
        "min-fresh": (int, True),
        "no-cache": (None, False),
        "no-store": (None, False),
        "no-transform": (None, False),
        "only-if-cached": (None, False),
        "must-revalidate": (None, False),
        "public": (None, False),
        "private": (None, False),
        "proxy-revalidate": (None, False),
        "s-maxage": (int, True),
    }
    raw = headers.get("cache-control", headers.get("Cache-Control", ""))
    parsed: dict[str, int | None] = {}
    for token in raw.split(","):
        if not token.strip():
            continue
        pieces = token.split("=", 1)
        name = pieces[0].strip()
        spec = known_directives.get(name)
        if spec is None:
            logger.debug("Ignoring unknown cache-control directive: %s", name)
            continue
        typ, required = spec
        # Value-less or optional-value directives default to None first;
        # a successful typed parse below overwrites this.
        if not typ or not required:
            parsed[name] = None
        if typ:
            try:
                parsed[name] = typ(pieces[1].strip())
            except IndexError:
                if required:
                    logger.debug(
                        "Missing value for cache-control " "directive: %s",
                        name,
                    )
            except ValueError:
                logger.debug(
                    "Invalid value for cache-control directive " "%s, must be %s",
                    name,
                    typ.__name__,
                )
    return parsed
|
(self, headers: Mapping[str, str]) -> dict[str, int | None]
|
69,651 |
cachecontrol.controller
|
update_cached_response
|
On a 304 we will get a new set of headers that we want to
update our cached value with, assuming we have one.
This should only ever be called when we've sent an ETag and
gotten a 304 as the response.
|
def update_cached_response(
    self, request: PreparedRequest, response: HTTPResponse
) -> HTTPResponse:
    """On a 304 we will get a new set of headers that we want to
    update our cached value with, assuming we have one.
    This should only ever be called when we've sent an ETag and
    gotten a 304 as the response.
    """
    assert request.url is not None
    cache_url = self.cache_url(request.url)
    cached = self._load_from_cache(request)
    if not cached:
        # Nothing stored for this URL -- pass the 304 straight through.
        return response
    # Merge the 304's headers onto the cached response, per
    # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
    # The server isn't supposed to send headers that would make the cached
    # body invalid, but just in case, strip out the ones known to be
    # problematic under typical assumptions (e.g. content-length).
    skip = ("content-length",)
    fresh_headers = {
        name: value
        for name, value in response.headers.items()
        if name.lower() not in skip
    }
    cached.headers.update(fresh_headers)
    # Report 200 because we are serving the full body from the cache.
    cached.status = 200
    # Persist the refreshed entry.
    self._cache_set(cache_url, request, cached)
    return cached
|
(self, request: 'PreparedRequest', response: 'HTTPResponse') -> 'HTTPResponse'
|
69,659 |
prance
|
BaseParser
|
The BaseParser loads, parses and validates OpenAPI 2.0 and 3.0.0 specs.
Uses :py:class:`YAMLMixin` and :py:class:`JSONMixin` for additional
functionality.
|
class BaseParser(mixins.YAMLMixin, mixins.JSONMixin):
    """
    The BaseParser loads, parses and validates OpenAPI 2.0 and 3.0.0 specs.
    Uses :py:class:`YAMLMixin` and :py:class:`JSONMixin` for additional
    functionality.
    """
    # Maps backend name -> (supported major spec versions, validator method name).
    BACKENDS = {
        "flex": ((2,), "_validate_flex"),
        "swagger-spec-validator": ((2,), "_validate_swagger_spec_validator"),
        "openapi-spec-validator": ((2, 3), "_validate_openapi_spec_validator"),
    }
    # Human-readable prefixes used when reporting the detected spec version.
    SPEC_VERSION_2_PREFIX = "Swagger/OpenAPI"
    SPEC_VERSION_3_PREFIX = "OpenAPI"
    def __init__(self, url=None, spec_string=None, lazy=False, **kwargs):
        """
        Load, parse and validate specs.
        You can either provide a URL or a spec string, but not both.
        :param str url: The URL of the file to load. URLs missing a scheme are
        assumed to be file URLs.
        :param str spec_string: The specifications to parse.
        :param bool lazy: If true, do not load or parse anything. Instead wait for
        the parse function to be invoked.
        :param str backend: [optional] one of 'flex', 'swagger-spec-validator' or
        'openapi-spec-validator'.
        Determines the validation backend to use. Defaults to the first installed
        backend in the ordered list obtained from util.validation_backends().
        :param bool strict: [optional] Applies only to the 'swagger-spec-validator'
        backend. If False, accepts non-String keys by stringifying them before
        validation. Defaults to True.
        :param str encoding: [optional] For local URLs, use the given file encoding
        instead of auto-detecting. Defaults to None.
        """
        # BUGFIX: the previous expression `url or spec_string and not (url and
        # spec_string)` bound `and` tighter than `or`, so providing BOTH a url
        # and a spec string slipped through. Parenthesize to enforce the
        # documented exactly-one-of contract.
        assert (url or spec_string) and not (url and spec_string), (
            "You must provide either a URL to read, or a spec string to "
            "parse, but not both!"
        )
        # Keep the parameters around for later use
        self.url = None
        if url:
            from .util.url import absurl
            from .util.fs import abspath
            import os
            # Resolve scheme-less / relative URLs against the current directory.
            self.url = absurl(url, abspath(os.getcwd()))
        else:
            # No URL given: fall back to the module's placeholder URL.
            self.url = _PLACEHOLDER_URL
        self._spec_string = spec_string
        # Initialize variables we're filling later
        self.specification = None
        self.version = None
        self.version_name = None
        self.version_parsed = ()
        self.valid = False
        # Add kw args as options
        self.options = kwargs
        # Verify backend
        from .util import default_validation_backend
        self.backend = self.options.get("backend", default_validation_backend())
        if self.backend not in BaseParser.BACKENDS.keys():
            raise ValueError(
                f"Backend may only be one of {BaseParser.BACKENDS.keys()}!"
            )
        # Start parsing if lazy mode is not requested.
        if not lazy:
            self.parse()
    def parse(self):  # noqa: F811
        """
        When the BaseParser was lazily created, load and parse now.
        You can use this function to re-use an existing parser for parsing
        multiple files by setting its url property and then invoking this
        function.
        """
        strict = self.options.get("strict", True)
        # If we have a file name, we need to read that in.
        if self.url and self.url != _PLACEHOLDER_URL:
            from .util.url import fetch_url
            encoding = self.options.get("encoding", None)
            self.specification = fetch_url(self.url, encoding=encoding, strict=strict)
        # If we have a spec string, try to parse it.
        if self._spec_string:
            from .util.formats import parse_spec
            self.specification = parse_spec(self._spec_string, self.url)
        # If we have a parsed spec, convert it to JSON. Then we can validate
        # the JSON. At this point, we *require* a parsed specification to exist,
        # so we might as well assert.
        assert self.specification, "No specification parsed, cannot validate!"
        self._validate()
    def _validate(self):
        """Dispatch validation of ``self.specification`` to the chosen backend."""
        # Ensure specification is a mapping
        from collections.abc import Mapping
        if not isinstance(self.specification, Mapping):
            raise ValidationError("Could not parse specifications!")
        # Ensure the selected backend supports the given spec version
        versions, validator_name = BaseParser.BACKENDS[self.backend]
        # Fetch the spec version. Note that this is the spec version the spec
        # *claims* to be; we later set the one we actually could validate as.
        spec_version = self.specification.get("openapi", None)
        if spec_version is None:
            spec_version = self.specification.get("swagger", None)
        if spec_version is None:
            raise ValidationError(
                "Could not determine specification schema " "version!"
            )
        # Try parsing the spec version, examine the first component.
        import packaging.version
        parsed = packaging.version.parse(spec_version)
        if parsed.major not in versions:
            raise ValidationError(
                'Version mismatch: selected backend "%s"'
                " does not support specified version %s!" % (self.backend, spec_version)
            )
        # Validate the parsed specs, using the given validation backend.
        validator = getattr(self, validator_name)
        # Set valid flag according to whether validator succeeds
        self.valid = False
        validator(parsed)
        self.valid = True
    def __set_version(self, prefix, version: Version):
        """Record the detected spec version on the instance."""
        self.version_name = prefix
        self.version_parsed = version.release
        stringified = str(version)
        # Swagger 2.x is reported as "major.minor" only.
        if prefix == BaseParser.SPEC_VERSION_2_PREFIX:
            stringified = "%d.%d" % (version.major, version.minor)
        self.version = f"{self.version_name} {stringified}"
    def _validate_flex(self, spec_version: Version):  # pragma: nocover
        """Validate the spec with the ``flex`` backend (Swagger 2.x only)."""
        # Set the version independently of whether validation succeeds
        self.__set_version(BaseParser.SPEC_VERSION_2_PREFIX, spec_version)
        from flex.exceptions import ValidationError as JSEValidationError
        from flex.core import parse as validate
        try:
            validate(self.specification)
        except JSEValidationError as ex:
            from .util.exceptions import raise_from
            raise_from(ValidationError, ex)
    def _validate_swagger_spec_validator(
        self, spec_version: Version
    ):  # pragma: nocover
        """Validate the spec with the ``swagger-spec-validator`` backend."""
        # Set the version independently of whether validation succeeds
        self.__set_version(BaseParser.SPEC_VERSION_2_PREFIX, spec_version)
        from swagger_spec_validator.common import SwaggerValidationError as SSVErr
        from swagger_spec_validator.validator20 import validate_spec
        try:
            validate_spec(self.specification)
        except SSVErr as ex:
            from .util.exceptions import raise_from
            raise_from(ValidationError, ex)
    def _validate_openapi_spec_validator(
        self, spec_version: Version
    ):  # pragma: nocover
        """Validate the spec with the ``openapi-spec-validator`` backend."""
        from openapi_spec_validator import validate_spec
        from jsonschema.exceptions import ValidationError as JSEValidationError
        from jsonschema.exceptions import RefResolutionError
        # Validate according to detected version. Unsupported versions are
        # already caught outside of this function.
        from .util.exceptions import raise_from
        if spec_version.major == 3:
            # Set the version independently of whether validation succeeds
            self.__set_version(BaseParser.SPEC_VERSION_3_PREFIX, spec_version)
        elif spec_version.major == 2:
            # Set the version independently of whether validation succeeds
            self.__set_version(BaseParser.SPEC_VERSION_2_PREFIX, spec_version)
        try:
            validate_spec(self.specification)
        except TypeError as type_ex:  # pragma: nocover
            # Non-string keys commonly trigger this; point at strict mode.
            raise_from(ValidationError, type_ex, self._strict_warning())
        except JSEValidationError as v2_ex:
            raise_from(ValidationError, v2_ex)
        except RefResolutionError as ref_ex:
            raise_from(ValidationError, ref_ex)
    def _strict_warning(self):
        """Return a warning if strict mode is off."""
        if self.options.get("strict", True):
            return (
                "Strict mode enabled (the default), so this could be due to an "
                "integer key, such as an HTTP status code."
            )
        return (
            "Strict mode disabled. Prance cannot help you narrow this further "
            "down, sorry."
        )
|
(url=None, spec_string=None, lazy=False, **kwargs)
|
69,660 |
prance
|
__set_version
| null |
def __set_version(self, prefix, version: Version):
    """Record the detected spec version on the parser instance."""
    self.version_name = prefix
    self.version_parsed = version.release
    rendered = str(version)
    # Swagger 2.x is conventionally reported as "major.minor" only.
    if prefix == BaseParser.SPEC_VERSION_2_PREFIX:
        rendered = "%d.%d" % (version.major, version.minor)
    self.version = f"{self.version_name} {rendered}"
|
(self, prefix, version: packaging.version.Version)
|
69,661 |
prance
|
__init__
|
Load, parse and validate specs.
You can either provide a URL or a spec string, but not both.
:param str url: The URL of the file to load. URLs missing a scheme are
assumed to be file URLs.
:param str spec_string: The specifications to parse.
:param bool lazy: If true, do not load or parse anything. Instead wait for
the parse function to be invoked.
:param str backend: [optional] one of 'flex', 'swagger-spec-validator' or
'openapi-spec-validator'.
Determines the validation backend to use. Defaults to the first installed
backend in the ordered list obtained from util.validation_backends().
:param bool strict: [optional] Applies only to the 'swagger-spec-validator'
backend. If False, accepts non-String keys by stringifying them before
validation. Defaults to True.
:param str encoding: [optional] For local URLs, use the given file encoding
instead of auto-detecting. Defaults to None.
|
def __init__(self, url=None, spec_string=None, lazy=False, **kwargs):
    """
    Load, parse and validate specs.
    You can either provide a URL or a spec string, but not both.
    :param str url: The URL of the file to load. URLs missing a scheme are
    assumed to be file URLs.
    :param str spec_string: The specifications to parse.
    :param bool lazy: If true, do not load or parse anything. Instead wait for
    the parse function to be invoked.
    :param str backend: [optional] one of 'flex', 'swagger-spec-validator' or
    'openapi-spec-validator'.
    Determines the validation backend to use. Defaults to the first installed
    backend in the ordered list obtained from util.validation_backends().
    :param bool strict: [optional] Applies only to the 'swagger-spec-validator'
    backend. If False, accepts non-String keys by stringifying them before
    validation. Defaults to True.
    :param str encoding: [optional] For local URLs, use the given file encoding
    instead of auto-detecting. Defaults to None.
    """
    # BUGFIX: the previous `url or spec_string and not (url and spec_string)`
    # let BOTH url and spec_string through, because `and` binds tighter than
    # `or`. Parenthesize so exactly one input source is accepted, as
    # documented above.
    assert (url or spec_string) and not (url and spec_string), (
        "You must provide either a URL to read, or a spec string to "
        "parse, but not both!"
    )
    # Keep the parameters around for later use
    self.url = None
    if url:
        from .util.url import absurl
        from .util.fs import abspath
        import os
        # Resolve scheme-less / relative URLs against the current directory.
        self.url = absurl(url, abspath(os.getcwd()))
    else:
        # No URL given: fall back to the module's placeholder URL.
        self.url = _PLACEHOLDER_URL
    self._spec_string = spec_string
    # Initialize variables we're filling later
    self.specification = None
    self.version = None
    self.version_name = None
    self.version_parsed = ()
    self.valid = False
    # Add kw args as options
    self.options = kwargs
    # Verify backend
    from .util import default_validation_backend
    self.backend = self.options.get("backend", default_validation_backend())
    if self.backend not in BaseParser.BACKENDS.keys():
        raise ValueError(
            f"Backend may only be one of {BaseParser.BACKENDS.keys()}!"
        )
    # Start parsing if lazy mode is not requested.
    if not lazy:
        self.parse()
|
(self, url=None, spec_string=None, lazy=False, **kwargs)
|
69,662 |
prance
|
_strict_warning
|
Return a warning if strict mode is off.
|
def _strict_warning(self):
    """Return a warning if strict mode is off."""
    strict = self.options.get("strict", True)
    if not strict:
        return (
            "Strict mode disabled. Prance cannot help you narrow this further "
            "down, sorry."
        )
    return (
        "Strict mode enabled (the default), so this could be due to an "
        "integer key, such as an HTTP status code."
    )
|
(self)
|
69,663 |
prance
|
_validate
| null |
def _validate(self):
    """Dispatch validation of ``self.specification`` to the configured backend."""
    from collections.abc import Mapping

    # A spec that did not parse into a mapping cannot be validated at all.
    if not isinstance(self.specification, Mapping):
        raise ValidationError("Could not parse specifications!")

    # Look up which spec majors the selected backend supports, and which
    # method performs its validation.
    supported_majors, validator_name = BaseParser.BACKENDS[self.backend]

    # The version the spec *claims* to be; the version we actually could
    # validate as is recorded later by the validator itself.
    claimed = self.specification.get("openapi", None)
    if claimed is None:
        claimed = self.specification.get("swagger", None)
    if claimed is None:
        raise ValidationError(
            "Could not determine specification schema " "version!"
        )

    # Parse the claimed version and examine its major component.
    import packaging.version

    parsed = packaging.version.parse(claimed)
    if parsed.major not in supported_majors:
        raise ValidationError(
            'Version mismatch: selected backend "%s"'
            " does not support specified version %s!" % (self.backend, claimed)
        )

    # Run the backend validator; valid stays False unless it returns cleanly.
    validator = getattr(self, validator_name)
    self.valid = False
    validator(parsed)
    self.valid = True
|
(self)
|
69,664 |
prance
|
_validate_flex
| null |
def _validate_flex(self, spec_version: Version):  # pragma: nocover
    """Validate the spec with the ``flex`` backend (Swagger 2.x only)."""
    # Record the version even when validation subsequently fails.
    self.__set_version(BaseParser.SPEC_VERSION_2_PREFIX, spec_version)

    from flex.exceptions import ValidationError as JSEValidationError
    from flex.core import parse as validate

    try:
        validate(self.specification)
    except JSEValidationError as err:
        # Re-raise as prance's own ValidationError, preserving the cause.
        from .util.exceptions import raise_from

        raise_from(ValidationError, err)
|
(self, spec_version: packaging.version.Version)
|
69,665 |
prance
|
_validate_openapi_spec_validator
| null |
def _validate_openapi_spec_validator(
    self, spec_version: Version
):  # pragma: nocover
    """Validate the spec with the ``openapi-spec-validator`` backend (2.x/3.x)."""
    from openapi_spec_validator import validate_spec
    from jsonschema.exceptions import ValidationError as JSEValidationError
    from jsonschema.exceptions import RefResolutionError

    from .util.exceptions import raise_from

    # Record the detected version before validating; unsupported majors were
    # already rejected before this method was dispatched.
    if spec_version.major == 3:
        self.__set_version(BaseParser.SPEC_VERSION_3_PREFIX, spec_version)
    elif spec_version.major == 2:
        self.__set_version(BaseParser.SPEC_VERSION_2_PREFIX, spec_version)

    try:
        validate_spec(self.specification)
    except TypeError as cause:  # pragma: nocover
        # Typically triggered by non-string keys; mention strict mode.
        raise_from(ValidationError, cause, self._strict_warning())
    except JSEValidationError as cause:
        raise_from(ValidationError, cause)
    except RefResolutionError as cause:
        raise_from(ValidationError, cause)
|
(self, spec_version: packaging.version.Version)
|
69,666 |
prance
|
_validate_swagger_spec_validator
| null |
def _validate_swagger_spec_validator(
    self, spec_version: Version
):  # pragma: nocover
    """Validate the spec with the ``swagger-spec-validator`` backend (2.x only)."""
    # Record the version even when validation subsequently fails.
    self.__set_version(BaseParser.SPEC_VERSION_2_PREFIX, spec_version)

    from swagger_spec_validator.common import SwaggerValidationError as SSVErr
    from swagger_spec_validator.validator20 import validate_spec

    try:
        validate_spec(self.specification)
    except SSVErr as err:
        # Re-raise as prance's own ValidationError, preserving the cause.
        from .util.exceptions import raise_from

        raise_from(ValidationError, err)
|
(self, spec_version: packaging.version.Version)
|
69,667 |
prance.mixins
|
json
|
Return a JSON representation of the specifications.
:return: JSON representation.
:rtype: dict
|
def json(self):
    """
    Return a JSON representation of the specifications.
    :return: JSON representation.
    :rtype: dict
    """
    # specs_updated() must run first: it (re)primes the spec cache and
    # reports whether the serialized copy is stale.
    stale = self.specs_updated()
    if stale or not getattr(self, self.__JSON, None):
        import json

        setattr(self, self.__JSON, json.dumps(self.specification))
    return getattr(self, self.__JSON)
|
(self)
|
69,668 |
prance
|
parse
|
When the BaseParser was lazily created, load and parse now.
You can use this function to re-use an existing parser for parsing
multiple files by setting its url property and then invoking this
function.
|
def parse(self):  # noqa: F811
    """
    When the BaseParser was lazily created, load and parse now.
    You can use this function to re-use an existing parser for parsing
    multiple files by setting its url property and then invoking this
    function.
    """
    strict = self.options.get("strict", True)

    # A real (non-placeholder) URL means the contents must be fetched.
    if self.url and self.url != _PLACEHOLDER_URL:
        from .util.url import fetch_url

        encoding = self.options.get("encoding", None)
        self.specification = fetch_url(self.url, encoding=encoding, strict=strict)

    # A raw spec string is parsed directly instead.
    if self._spec_string:
        from .util.formats import parse_spec

        self.specification = parse_spec(self._spec_string, self.url)

    # Whichever input path was taken, a parsed specification must exist by
    # now -- validation is impossible without one.
    assert self.specification, "No specification parsed, cannot validate!"

    self._validate()
|
(self)
|
69,669 |
prance.mixins
|
specs_updated
|
Test if self.specification changed.
:return: Whether the specs changed.
:rtype: bool
|
def specs_updated(self):
    """
    Test whether ``self.specification`` changed since the last call.
    :return: Whether the specs changed.
    :rtype: bool
    """
    cached = getattr(self, self.__CACHED_SPECS, None)
    # First call (nothing cached yet) or a divergence from the cached copy:
    # refresh the cache and report a change.
    if not cached or cached != self.specification:
        setattr(self, self.__CACHED_SPECS, self.specification.copy())
        return True
    # Cached copy matches the current specs.
    return False
|
(self)
|
69,670 |
prance.mixins
|
yaml
|
Return a YAML representation of the specifications.
:return: YAML representation.
:rtype: dict
|
def yaml(self):
    """
    Return a YAML representation of the specifications.
    :return: YAML representation.
    :rtype: dict
    """
    # specs_updated() must be queried first so the spec cache is primed.
    needs_refresh = self.specs_updated() or not getattr(self, self.__YAML, None)
    if needs_refresh:
        import yaml

        setattr(self, self.__YAML, yaml.dump(self.specification))
    return getattr(self, self.__YAML)
|
(self)
|
69,671 |
prance
|
ResolvingParser
|
The ResolvingParser extends BaseParser with resolving references by inlining.
|
class ResolvingParser(BaseParser):
    """The ResolvingParser extends BaseParser with resolving references by inlining."""

    def __init__(self, url=None, spec_string=None, lazy=False, **kwargs):
        """
        See :py:class:`BaseParser`.
        Resolves JSON pointers/references (i.e. '$ref' keys) before validating the
        specs. The implication is that self.specification is fully resolved, and
        does not contain any references.
        Additional parameters, see :py::class:`util.RefResolver`.
        """
        # The cache must exist before BaseParser.__init__ runs: non-lazy
        # construction parses immediately, which ends up in _validate below.
        self.__reference_cache = {}
        BaseParser.__init__(self, url=url, spec_string=spec_string, lazy=lazy, **kwargs)

    def _validate(self):
        """Resolve all references in place, then run the base validation."""
        # The jsonschema implementation underlying BaseParser._validate does
        # not accept relative path references, which the Swagger specs allow
        # (http://swagger.io/specification/#referenceObject). Resolve with
        # our own RefResolver first, and validate afterwards.
        from .util.resolver import RefResolver

        accepted = (
            "encoding",
            "recursion_limit",
            "recursion_limit_handler",
            "resolve_types",
            "resolve_method",
            "strict",
        )
        passthrough = {
            name: value for (name, value) in self.options.items() if name in accepted
        }
        resolver = RefResolver(
            self.specification,
            self.url,
            reference_cache=self.__reference_cache,
            **passthrough,
        )
        resolver.resolve_references()
        self.specification = resolver.specs

        # Now validate - the BaseParser knows the specifics.
        BaseParser._validate(self)
|
(url=None, spec_string=None, lazy=False, **kwargs)
|
69,673 |
prance
|
__init__
|
See :py:class:`BaseParser`.
Resolves JSON pointers/references (i.e. '$ref' keys) before validating the
specs. The implication is that self.specification is fully resolved, and
does not contain any references.
Additional parameters, see :py::class:`util.RefResolver`.
|
def __init__(self, url=None, spec_string=None, lazy=False, **kwargs):
    """
    See :py:class:`BaseParser`.
    Resolves JSON pointers/references (i.e. '$ref' keys) before validating the
    specs. The implication is that self.specification is fully resolved, and
    does not contain any references.
    Additional parameters, see :py::class:`util.RefResolver`.
    """
    # Create a reference cache. It must exist before BaseParser.__init__
    # runs: non-lazy construction parses immediately, and that path reaches
    # _validate, which passes this cache to the RefResolver.
    self.__reference_cache = {}
    BaseParser.__init__(self, url=url, spec_string=spec_string, lazy=lazy, **kwargs)
|
(self, url=None, spec_string=None, lazy=False, **kwargs)
|
69,675 |
prance
|
_validate
| null |
def _validate(self):
    """Resolve all references in place, then run the base validation."""
    # The jsonschema implementation underlying BaseParser's validate does
    # not accept relative path references, which the Swagger specs allow
    # (http://swagger.io/specification/#referenceObject). Resolve with our
    # own RefResolver first, and validate afterwards.
    from .util.resolver import RefResolver

    accepted = (
        "encoding",
        "recursion_limit",
        "recursion_limit_handler",
        "resolve_types",
        "resolve_method",
        "strict",
    )
    passthrough = {
        name: value for (name, value) in self.options.items() if name in accepted
    }
    resolver = RefResolver(
        self.specification,
        self.url,
        reference_cache=self.__reference_cache,
        **passthrough,
    )
    resolver.resolve_references()
    self.specification = resolver.specs

    # Now validate - the BaseParser knows the specifics.
    BaseParser._validate(self)
|
(self)
|
69,683 |
prance
|
ValidationError
| null |
class ValidationError(Exception):
    """Signals that a specification could not be parsed or validated."""

    pass
| null |
69,695 |
prance
|
_TranslatingParser
| null |
class _TranslatingParser(BaseParser):
    """BaseParser variant that runs a reference-translation pass before validating."""

    def _validate(self):
        """Translate references via _RefTranslator, then defer to BaseParser."""
        from .util.translator import _RefTranslator

        xlator = _RefTranslator(self.specification, self.url)
        xlator.translate_references()
        self.specification = xlator.specs

        BaseParser._validate(self)
|
(url=None, spec_string=None, lazy=False, **kwargs)
|
69,699 |
prance
|
_validate
| null |
def _validate(self):
    """Translate references via _RefTranslator, then defer to BaseParser."""
    from .util.translator import _RefTranslator

    xlator = _RefTranslator(self.specification, self.url)
    xlator.translate_references()
    self.specification = xlator.specs

    BaseParser._validate(self)
|
(self)
|
69,710 |
jupyterlab_topbar_text
|
_jupyter_labextension_paths
| null |
def _jupyter_labextension_paths():
    """Declare the prebuilt lab extension's source directory and install name."""
    return [
        {
            "src": "labextension",
            "dest": "jupyterlab-topbar-text",
        }
    ]
|
()
|
69,712 |
langchain
|
__getattr__
| null |
def __getattr__(name: str) -> Any:
if name == "MRKLChain":
from langchain.agents import MRKLChain
_warn_on_import(name, replacement="langchain.agents.MRKLChain")
return MRKLChain
elif name == "ReActChain":
from langchain.agents import ReActChain
_warn_on_import(name, replacement="langchain.agents.ReActChain")
return ReActChain
elif name == "SelfAskWithSearchChain":
from langchain.agents import SelfAskWithSearchChain
_warn_on_import(name, replacement="langchain.agents.SelfAskWithSearchChain")
return SelfAskWithSearchChain
elif name == "ConversationChain":
from langchain.chains import ConversationChain
_warn_on_import(name, replacement="langchain.chains.ConversationChain")
return ConversationChain
elif name == "LLMBashChain":
raise ImportError(
"This module has been moved to langchain-experimental. "
"For more details: "
"https://github.com/langchain-ai/langchain/discussions/11352."
"To access this code, install it with `pip install langchain-experimental`."
"`from langchain_experimental.llm_bash.base "
"import LLMBashChain`"
)
elif name == "LLMChain":
from langchain.chains import LLMChain
_warn_on_import(name, replacement="langchain.chains.LLMChain")
return LLMChain
elif name == "LLMCheckerChain":
from langchain.chains import LLMCheckerChain
_warn_on_import(name, replacement="langchain.chains.LLMCheckerChain")
return LLMCheckerChain
elif name == "LLMMathChain":
from langchain.chains import LLMMathChain
_warn_on_import(name, replacement="langchain.chains.LLMMathChain")
return LLMMathChain
elif name == "QAWithSourcesChain":
from langchain.chains import QAWithSourcesChain
_warn_on_import(name, replacement="langchain.chains.QAWithSourcesChain")
return QAWithSourcesChain
elif name == "VectorDBQA":
from langchain.chains import VectorDBQA
_warn_on_import(name, replacement="langchain.chains.VectorDBQA")
return VectorDBQA
elif name == "VectorDBQAWithSourcesChain":
from langchain.chains import VectorDBQAWithSourcesChain
_warn_on_import(name, replacement="langchain.chains.VectorDBQAWithSourcesChain")
return VectorDBQAWithSourcesChain
elif name == "InMemoryDocstore":
from langchain_community.docstore import InMemoryDocstore
_warn_on_import(name, replacement="langchain.docstore.InMemoryDocstore")
return InMemoryDocstore
elif name == "Wikipedia":
from langchain_community.docstore import Wikipedia
_warn_on_import(name, replacement="langchain.docstore.Wikipedia")
return Wikipedia
elif name == "Anthropic":
from langchain_community.llms import Anthropic
_warn_on_import(name, replacement="langchain_community.llms.Anthropic")
return Anthropic
elif name == "Banana":
from langchain_community.llms import Banana
_warn_on_import(name, replacement="langchain_community.llms.Banana")
return Banana
elif name == "CerebriumAI":
from langchain_community.llms import CerebriumAI
_warn_on_import(name, replacement="langchain_community.llms.CerebriumAI")
return CerebriumAI
elif name == "Cohere":
from langchain_community.llms import Cohere
_warn_on_import(name, replacement="langchain_community.llms.Cohere")
return Cohere
elif name == "ForefrontAI":
from langchain_community.llms import ForefrontAI
_warn_on_import(name, replacement="langchain_community.llms.ForefrontAI")
return ForefrontAI
elif name == "GooseAI":
from langchain_community.llms import GooseAI
_warn_on_import(name, replacement="langchain_community.llms.GooseAI")
return GooseAI
elif name == "HuggingFaceHub":
from langchain_community.llms import HuggingFaceHub
_warn_on_import(name, replacement="langchain_community.llms.HuggingFaceHub")
return HuggingFaceHub
elif name == "HuggingFaceTextGenInference":
from langchain_community.llms import HuggingFaceTextGenInference
_warn_on_import(
name, replacement="langchain_community.llms.HuggingFaceTextGenInference"
)
return HuggingFaceTextGenInference
elif name == "LlamaCpp":
from langchain_community.llms import LlamaCpp
_warn_on_import(name, replacement="langchain_community.llms.LlamaCpp")
return LlamaCpp
elif name == "Modal":
from langchain_community.llms import Modal
_warn_on_import(name, replacement="langchain_community.llms.Modal")
return Modal
elif name == "OpenAI":
from langchain_community.llms import OpenAI
_warn_on_import(name, replacement="langchain_community.llms.OpenAI")
return OpenAI
elif name == "Petals":
from langchain_community.llms import Petals
_warn_on_import(name, replacement="langchain_community.llms.Petals")
return Petals
elif name == "PipelineAI":
from langchain_community.llms import PipelineAI
_warn_on_import(name, replacement="langchain_community.llms.PipelineAI")
return PipelineAI
elif name == "SagemakerEndpoint":
from langchain_community.llms import SagemakerEndpoint
_warn_on_import(name, replacement="langchain_community.llms.SagemakerEndpoint")
return SagemakerEndpoint
elif name == "StochasticAI":
from langchain_community.llms import StochasticAI
_warn_on_import(name, replacement="langchain_community.llms.StochasticAI")
return StochasticAI
elif name == "Writer":
from langchain_community.llms import Writer
_warn_on_import(name, replacement="langchain_community.llms.Writer")
return Writer
elif name == "HuggingFacePipeline":
from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
_warn_on_import(
name,
replacement="langchain_community.llms.huggingface_pipeline.HuggingFacePipeline",
)
return HuggingFacePipeline
elif name == "FewShotPromptTemplate":
from langchain_core.prompts import FewShotPromptTemplate
_warn_on_import(
name, replacement="langchain_core.prompts.FewShotPromptTemplate"
)
return FewShotPromptTemplate
elif name == "Prompt":
from langchain_core.prompts import PromptTemplate
_warn_on_import(name, replacement="langchain_core.prompts.PromptTemplate")
# it's renamed as prompt template anyways
# this is just for backwards compat
return PromptTemplate
elif name == "PromptTemplate":
from langchain_core.prompts import PromptTemplate
_warn_on_import(name, replacement="langchain_core.prompts.PromptTemplate")
return PromptTemplate
elif name == "BasePromptTemplate":
from langchain_core.prompts import BasePromptTemplate
_warn_on_import(name, replacement="langchain_core.prompts.BasePromptTemplate")
return BasePromptTemplate
elif name == "ArxivAPIWrapper":
from langchain_community.utilities import ArxivAPIWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.ArxivAPIWrapper"
)
return ArxivAPIWrapper
elif name == "GoldenQueryAPIWrapper":
from langchain_community.utilities import GoldenQueryAPIWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.GoldenQueryAPIWrapper"
)
return GoldenQueryAPIWrapper
elif name == "GoogleSearchAPIWrapper":
from langchain_community.utilities import GoogleSearchAPIWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.GoogleSearchAPIWrapper"
)
return GoogleSearchAPIWrapper
elif name == "GoogleSerperAPIWrapper":
from langchain_community.utilities import GoogleSerperAPIWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.GoogleSerperAPIWrapper"
)
return GoogleSerperAPIWrapper
elif name == "PowerBIDataset":
from langchain_community.utilities import PowerBIDataset
_warn_on_import(
name, replacement="langchain_community.utilities.PowerBIDataset"
)
return PowerBIDataset
elif name == "SearxSearchWrapper":
from langchain_community.utilities import SearxSearchWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.SearxSearchWrapper"
)
return SearxSearchWrapper
elif name == "WikipediaAPIWrapper":
from langchain_community.utilities import WikipediaAPIWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.WikipediaAPIWrapper"
)
return WikipediaAPIWrapper
elif name == "WolframAlphaAPIWrapper":
from langchain_community.utilities import WolframAlphaAPIWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.WolframAlphaAPIWrapper"
)
return WolframAlphaAPIWrapper
elif name == "SQLDatabase":
from langchain_community.utilities import SQLDatabase
_warn_on_import(name, replacement="langchain_community.utilities.SQLDatabase")
return SQLDatabase
elif name == "FAISS":
from langchain_community.vectorstores import FAISS
_warn_on_import(name, replacement="langchain_community.vectorstores.FAISS")
return FAISS
elif name == "ElasticVectorSearch":
from langchain_community.vectorstores import ElasticVectorSearch
_warn_on_import(
name, replacement="langchain_community.vectorstores.ElasticVectorSearch"
)
return ElasticVectorSearch
# For backwards compatibility
elif name == "SerpAPIChain" or name == "SerpAPIWrapper":
from langchain_community.utilities import SerpAPIWrapper
_warn_on_import(
name, replacement="langchain_community.utilities.SerpAPIWrapper"
)
return SerpAPIWrapper
elif name == "verbose":
from langchain.globals import _verbose
_warn_on_import(
name,
replacement=(
"langchain.globals.set_verbose() / langchain.globals.get_verbose()"
),
)
return _verbose
elif name == "debug":
from langchain.globals import _debug
_warn_on_import(
name,
replacement=(
"langchain.globals.set_debug() / langchain.globals.get_debug()"
),
)
return _debug
elif name == "llm_cache":
from langchain.globals import _llm_cache
_warn_on_import(
name,
replacement=(
"langchain.globals.set_llm_cache() / langchain.globals.get_llm_cache()"
),
)
return _llm_cache
else:
raise AttributeError(f"Could not find: {name}")
|
(name: str) -> Any
|
69,713 |
langchain
|
_warn_on_import
|
Warn on import of deprecated module.
|
def _warn_on_import(name: str, replacement: Optional[str] = None) -> None:
    """Warn on import of deprecated module."""
    from langchain._api.interactive_env import is_interactive_env

    if is_interactive_env():
        # Interactive sessions (REPL / notebooks) trigger imports via
        # auto-complete; warning there would be pure noise, so stay silent.
        return
    if replacement:
        message = (
            f"Importing {name} from langchain root module is no longer supported. "
            f"Please use {replacement} instead."
        )
    else:
        message = (
            f"Importing {name} from langchain root module is no longer supported."
        )
    warnings.warn(message)
|
(name: str, replacement: Optional[str] = None) -> NoneType
|
69,714 |
langchain_core._api.deprecation
|
surface_langchain_deprecation_warnings
|
Unmute LangChain deprecation warnings.
|
def surface_langchain_deprecation_warnings() -> None:
    """Unmute LangChain deprecation warnings."""
    # Re-enable the "default" action for both categories so that each unique
    # deprecation is shown once per location again.
    for category in (
        LangChainPendingDeprecationWarning,
        LangChainDeprecationWarning,
    ):
        warnings.filterwarnings("default", category=category)
() -> NoneType
|
69,716 |
fastparquet.util
|
ParquetException
|
Generic Exception related to unexpected data format when
reading parquet file.
|
class ParquetException(Exception):
    """Generic exception raised when a parquet file contains data in an
    unexpected format while being read."""
| null |
69,717 |
fastparquet.api
|
ParquetFile
|
The metadata of a parquet file or collection
Reads the metadata (row-groups and schema definition) and provides
methods to extract the data from the files.
Note that when reading parquet files partitioned using directories
(i.e. using the hive/drill scheme), an attempt is made to coerce
the partition values to a number, datetime or timedelta. Fastparquet
cannot read a hive/drill parquet file with partition names which coerce
to the same value, such as "0.7" and ".7".
Parameters
----------
fn: path/URL string or list of paths
Location of the data. If a directory, will attempt to read a file
"_metadata" within that directory. If a list of paths, will assume
that they make up a single parquet data set. This parameter can also
be any file-like object, in which case this must be a single-file
dataset.
verify: bool [False]
test file start/end byte markers
open_with: function
With the signature `func(path, mode)`, returns a context which
evaluated to a file open for reading. Defaults to the built-in `open`.
root: str
If passing a list of files, the top directory of the data-set may
be ambiguous for partitioning where the upmost field has only one
value. Use this to specify the dataset root directory, if required.
fs: fsspec-compatible filesystem
You can use this instead of open_with (otherwise, it will be inferred)
pandas_nulls: bool (True)
If True, columns that are int or bool in parquet, but have nulls, will become
pandas nullable types (UInt, Int, boolean). If False (the only behaviour
prior to v0.7.0), both kinds will be cast to float, and nulls will be NaN
unless pandas metadata indicates that the original datatypes were nullable.
Pandas nullable types were introduced in v1.0.0, but were still marked as
experimental in v1.3.0.
Attributes
----------
fn: path/URL
Of '_metadata' file.
basepath: path/URL
Of directory containing files of parquet dataset.
cats: dict
Columns derived from hive/drill directory information, with known
values for each column.
categories: list
Columns marked as categorical in the extra metadata (meaning the
data must have come from pandas).
columns: list of str
The data columns available
count: int
Total number of rows
dtypes: dict
Expected output types for each column
file_scheme: str
'simple': all row groups are within the same file; 'hive': all row
groups are in other files; 'mixed': row groups in this file and others
too; 'empty': no row groups at all.
info: dict
Combination of some of the other attributes
key_value_metadata: dict
Additional information about this data's origin, e.g., pandas
description, and custom metadata defined by user.
row_groups: list
Thrift objects for each row group
schema: schema.SchemaHelper
print this for a representation of the column structure
selfmade: bool
If this file was created by fastparquet
statistics: dict
Max/min/count of each column chunk
fs: fsspec-compatible filesystem
You can use this instead of open_with (otherwise, it will be inferred)
|
class ParquetFile(object):
"""The metadata of a parquet file or collection
Reads the metadata (row-groups and schema definition) and provides
methods to extract the data from the files.
Note that when reading parquet files partitioned using directories
(i.e. using the hive/drill scheme), an attempt is made to coerce
the partition values to a number, datetime or timedelta. Fastparquet
cannot read a hive/drill parquet file with partition names which coerce
to the same value, such as "0.7" and ".7".
Parameters
----------
fn: path/URL string or list of paths
Location of the data. If a directory, will attempt to read a file
"_metadata" within that directory. If a list of paths, will assume
that they make up a single parquet data set. This parameter can also
be any file-like object, in which case this must be a single-file
dataset.
verify: bool [False]
test file start/end byte markers
open_with: function
With the signature `func(path, mode)`, returns a context which
evaluated to a file open for reading. Defaults to the built-in `open`.
root: str
If passing a list of files, the top directory of the data-set may
be ambiguous for partitioning where the upmost field has only one
value. Use this to specify the dataset root directory, if required.
fs: fsspec-compatible filesystem
You can use this instead of open_with (otherwise, it will be inferred)
pandas_nulls: bool (True)
If True, columns that are int or bool in parquet, but have nulls, will become
pandas nullable types (UInt, Int, boolean). If False (the only behaviour
prior to v0.7.0), both kinds will be cast to float, and nulls will be NaN
unless pandas metadata indicates that the original datatypes were nullable.
Pandas nullable types were introduced in v1.0.0, but were still marked as
experimental in v1.3.0.
Attributes
----------
fn: path/URL
Of '_metadata' file.
basepath: path/URL
Of directory containing files of parquet dataset.
cats: dict
Columns derived from hive/drill directory information, with known
values for each column.
categories: list
Columns marked as categorical in the extra metadata (meaning the
data must have come from pandas).
columns: list of str
The data columns available
count: int
Total number of rows
dtypes: dict
Expected output types for each column
file_scheme: str
'simple': all row groups are within the same file; 'hive': all row
groups are in other files; 'mixed': row groups in this file and others
too; 'empty': no row groups at all.
info: dict
Combination of some of the other attributes
key_value_metadata: dict
Additional information about this data's origin, e.g., pandas
description, and custom metadata defined by user.
row_groups: list
Thrift objects for each row group
schema: schema.SchemaHelper
print this for a representation of the column structure
selfmade: bool
If this file was created by fastparquet
statistics: dict
Max/min/count of each column chunk
fs: fsspec-compatible filesystem
You can use this instead of open_with (otherwise, it will be inferred)
"""
_pdm = None
_kvm = None
_categories = None
def __init__(self, fn, verify=False, open_with=default_open, root=False,
             sep=None, fs=None, pandas_nulls=True, dtypes=None):
    """Open parquet data given a path/URL, list of paths, or file-like.

    See the class docstring for parameter semantics; ``sep`` is accepted
    for backwards compatibility but not used here.
    """
    self.pandas_nulls = pandas_nulls
    self._base_dtype = dtypes
    self.tz = None
    self._columns_dtype = None
    # Resolve the (filesystem, opener) pair: an explicit ``fs`` wins,
    # otherwise use the filesystem bound to ``open_with``, otherwise
    # infer both from the path/URL.
    if open_with is default_open and fs is None:
        fs = fsspec.filesystem("file")
    elif fs is not None:
        open_with = fs.open
    else:
        fs = getattr(open_with, "__self__", None)
        if fs is None:
            fs, fn, open_with, mkdirs = get_fs(fn, open_with, None)
    if isinstance(fn, (tuple, list)):
        # A list of data files: merge their footers into one synthetic
        # "_metadata" structure.
        if root and fs is not None:
            root = fs._strip_protocol(root)
        basepath, fmd = metadata_from_many(fn, verify_schema=verify,
                                           open_with=open_with, root=root,
                                           fs=fs)
        writer.consolidate_categories(fmd)
        self.fn = join_path(
            basepath, '_metadata') if basepath else '_metadata'
        self.fmd = fmd
        self._set_attrs()
    elif hasattr(fn, 'read'):
        # file-like
        self.fn = None
        self._parse_header(fn, verify)
        if self.file_scheme not in ['simple', 'empty']:
            raise ValueError('Cannot use file-like input '
                             'with multi-file data')
        open_with = lambda *args, **kwargs: fn
    elif isinstance(fs, fsspec.AbstractFileSystem):
        if fs.isfile(fn):
            # Single data file.
            self.fn = join_path(fn)
            with open_with(fn, 'rb') as f:
                self._parse_header(f, verify)
            if root:
                # Derive hive/drill partition values from the path
                # relative to the given dataset root.
                paths = [fn.replace(root, "")]
                self.file_scheme, self.cats = paths_to_cats(paths, None)
        elif "*" in fn or fs.isdir(fn):
            # Directory or glob: prefer an existing "_metadata" file.
            fn2 = join_path(fn, '_metadata')
            if fs.exists(fn2):
                self.fn = fn2
                with open_with(fn2, 'rb') as f:
                    self._parse_header(f, verify)
                fn = fn2
            else:
                # TODO: get details from fs here, rather than do suffix cat in
                # metadata_from_many
                if "*" in fn:
                    allfiles = fs.glob(fn)
                else:
                    allfiles = [f for f in fs.find(fn) if
                                f.endswith(".parquet") or f.endswith(".parq")]
                root = root or fn
                if not allfiles:
                    raise ValueError("No files in dir")
                if root:
                    root = fs._strip_protocol(root)
                basepath, fmd = metadata_from_many(allfiles, verify_schema=verify,
                                                   open_with=open_with, root=root,
                                                   fs=fs)
                writer.consolidate_categories(fmd)
                self.fn = join_path(basepath, '_metadata') if basepath \
                    else '_metadata'
                self.fmd = fmd
                self._set_attrs()
                self.fs = fs
        else:
            raise FileNotFoundError(fn)
    else:
        # Unknown filesystem/opener: try the path as a single file first,
        # then as a directory containing "_metadata".
        done = False
        try:
            self.fn = fn
            f = open_with(fn)
            self._parse_header(f, verify)
            done = True
        except IOError:
            pass
        if not done:
            # allow this to error with FileNotFound or whatever
            try:
                self.fn = join_path(fn, "_metadata")
                f = open_with(self.fn)
                self._parse_header(f, verify)
            except IOError as e:
                raise ValueError("Opening directories without a _metadata requires"
                                 "a filesystem compatible with fsspec") from e
    self.open = open_with
    self._statistics = None
def _parse_header(self, f, verify=True):
    """Read and decode the thrift footer of the open file ``f``.

    Sets ``self.fmd`` and derived attributes via ``_set_attrs``. When
    ``verify`` is True, checks the "PAR1" magic bytes at both ends.
    Raises ParquetException on a malformed file or footer.
    """
    if self.fn and self.fn.endswith("_metadata"):
        # no point attempting to read footer only for pure metadata
        data = f.read()[4:-8]
        self._head_size = len(data)
    else:
        try:
            f.seek(0)
            if verify:
                assert f.read(4) == b'PAR1'
            # Footer layout: ...metadata, 4-byte LE length, "PAR1".
            f.seek(-8, 2)
            head_size = struct.unpack('<I', f.read(4))[0]
            if verify:
                assert f.read() == b'PAR1'
            self._head_size = head_size
            f.seek(-(head_size + 8), 2)
            data = f.read(head_size)
        except (AssertionError, struct.error):
            raise ParquetException('File parse failed: %s' % self.fn)
    try:
        fmd = from_buffer(data, "FileMetaData")
    except Exception:
        raise ParquetException('Metadata parse failed: %s' % self.fn)
    # Decode each row group's first chunk's file_path from bytes to str.
    # Thrift fields are accessed positionally; the commented lines show
    # the equivalent attribute form.
    # for rg in fmd.row_groups:
    for rg in fmd[4]:
        # chunks = rg.columns
        chunks = rg[1]
        if chunks:
            chunk = chunks[0]
            # s = chunk.file_path
            s = chunk.get(1)
            if s:
                # chunk.file_path = s.decode()
                chunk[1] = s.decode()
    self.fmd = fmd
    self._set_attrs()
def _set_attrs(self):
    """Copy commonly used fields of the file metadata onto ``self``."""
    fmd = self.fmd
    self.version = fmd.version
    self._schema = fmd.schema
    self.row_groups = fmd.row_groups or []
    self.created_by = fmd.created_by
    self.schema = schema.SchemaHelper(self._schema)
    # Files written by fastparquet itself permit some decoding shortcuts.
    self.selfmade = (
        b"fastparquet" in self.created_by if self.created_by is not None
        else False
    )
    self._read_partitions()
    self._dtypes()
@property
def helper(self):
    """Alias of :attr:`schema`, kept for backwards compatibility."""
    return self.schema
@property
def columns(self):
    """Data column names (partition columns excluded)."""
    names = []
    for name in self.dtypes:
        if name not in self.cats:
            names.append(name)
    return names
@property
def statistics(self):
    """Max/min/count per column chunk, computed lazily and cached."""
    cached = self._statistics
    if cached is None:
        # ``statistics`` here resolves to the module-level function,
        # not this property.
        cached = statistics(self)
        self._statistics = cached
    return cached
@property
def key_value_metadata(self):
    """Footer key/value metadata pairs as a str->str dict (cached)."""
    if self._kvm is None:
        pairs = {}
        for k in self.fmd.key_value_metadata or []:
            key = ensure_str(k.key, ignore_error=True)
            pairs[key] = ensure_str(k.value, ignore_error=True)
        self._kvm = pairs
    return self._kvm
@property
def partition_meta(self):
    """Pandas-metadata entries for partition columns, keyed by field name."""
    entries = self.pandas_metadata.get('partition_columns', [])
    return {entry['field_name']: entry for entry in entries}
@property
def basepath(self):
    """Directory containing the dataset (``fn`` without "_metadata")."""
    stripped = re.sub(r'_metadata(/)?$', '', self.fn)
    return stripped.rstrip('/')
def _read_partitions(self):
    """Infer file scheme and partition values from row-group file paths."""
    # Positional thrift access; the attribute form is shown alongside.
    # paths = [rg.columns[0].file_path] ... if rg.columns]
    paths = [rg[1][0].get(1, "") for rg in self.row_groups if rg[1]]
    self.file_scheme, self.cats = paths_to_cats(paths, self.partition_meta)
def head(self, nrows, **kwargs):
    """Get the first nrows of data

    This will load the whole of the first valid row-group for the given
    columns.

    kwargs can include things like columns, filters, etc., with the same
    semantics as to_pandas(). If filters are applied, it may happen that
    data is so reduced that 'nrows' is not ensured (fewer rows).

    returns: dataframe
    """
    # TODO: implement with truncated assign and early exit
    #  from reading
    total_rows = 0
    stop = 0  # number of leading row-groups needed to cover nrows
    for i, rg in enumerate(self.row_groups):
        total_rows += rg.num_rows
        stop = i + 1
        if total_rows >= nrows:
            break
    # Fix: the original used the loop variable after the loop, which raised
    # UnboundLocalError for a dataset with zero row groups; an empty slice
    # now yields an empty dataframe instead.
    return self[:stop].to_pandas(**kwargs).head(nrows)
def __getitem__(self, item):
    """Select among the row-groups using integer/slicing"""
    import copy
    new_rgs = self.row_groups[item]
    if not isinstance(new_rgs, list):
        # A single integer index returns one row group; wrap it.
        new_rgs = [new_rgs]
    # Build a new ParquetFile sharing this one's state but with a reduced
    # fmd; __init__ is bypassed since no file access is needed.
    new_pf = object.__new__(ParquetFile)
    fmd = copy.copy(self.fmd)
    fmd.row_groups = new_rgs
    new_pf.__setstate__(
        {"fn": self.fn, "open": self.open, "fmd": fmd,
         "pandas_nulls": self.pandas_nulls, "_base_dtype": self._base_dtype,
         "tz": self.tz, "_columns_dtype": self._columns_dtype}
    )
    new_pf._set_attrs()
    return new_pf
def __len__(self):
    """Return number of row groups."""
    rgs = self.fmd.row_groups
    return len(rgs) if rgs else 0
def __bool__(self):
    """Always truthy, even with zero row groups (overrides ``__len__``)."""
    return True
def row_group_filename(self, rg):
    """Path of the file holding row-group ``rg``."""
    if not (rg.columns and rg.columns[0].file_path):
        # In-footer row group (simple scheme): data lives in self.fn.
        return self.fn
    rel = rg.columns[0].file_path
    base = self.basepath
    return join_path(base, rel) if base else rel
def read_row_group_file(self, rg, columns, categories, index=None,
                        assign=None, partition_meta=None, row_filter=False,
                        infile=None):
    """ Open file for reading, and process it as a row-group

    assign is None if this method is called directly (not from to_pandas),
    in which case we return the resultant dataframe

    row_filter can be:
    - False (don't do row filtering)
    - a list of filters (do filtering here for this one row-group;
      only makes sense if assign=None
    - bool array with a size equal to the number of rows in this group
      and the length of the assign arrays
    """
    categories = self.check_categories(categories)
    fn = self.row_group_filename(rg)
    ret = False
    if assign is None:
        # Direct call: allocate the output frame here and return it.
        if row_filter and isinstance(row_filter, list):
            # Two-pass filtering: first read only the filter columns,
            # then build a boolean row mask for the real read below.
            cs = self._columns_from_filters(row_filter)
            df = self.read_row_group_file(
                rg, cs, categories, index=False,
                infile=infile, row_filter=False)
            row_filter = self._column_filter(df, filters=row_filter)
            size = row_filter.sum()
            if size == rg.num_rows:
                # Mask selects every row - skip row-wise filtering.
                row_filter = False
        else:
            size = rg.num_rows
        df, assign = self.pre_allocate(
            size, columns, categories, index)
        if "PANDAS_ATTRS" in self.key_value_metadata:
            import json
            df.attrs = json.loads(self.key_value_metadata["PANDAS_ATTRS"])
        ret = True
    f = infile or self.open(fn, mode='rb')
    core.read_row_group(
        f, rg, columns, categories, self.schema, self.cats,
        selfmade=self.selfmade, index=index,
        assign=assign, scheme=self.file_scheme, partition_meta=partition_meta,
        row_filter=row_filter
    )
    if ret:
        return df
def iter_row_groups(self, filters=None, **kwargs):
    """
    Iterate a dataset by row-groups

    If filters is given, omits row-groups that fail the filter
    (saving execution time)

    Returns
    -------
    Generator yielding one Pandas data-frame per row-group.
    """
    rgs = filter_row_groups(self, filters) if filters else self.row_groups
    for rg in rgs:
        # NOTE: list.index uses equality; assumes row-group objects are
        # unique within self.row_groups.
        i = self.row_groups.index(rg)
        df = self[i].to_pandas(filters=filters, **kwargs)
        if not df.empty:
            yield df
def remove_row_groups(self, rgs, sort_pnames:bool=False,
                      write_fmd:bool=True, open_with=default_open,
                      remove_with=None):
    """
    Remove list of row groups from disk. `ParquetFile` metadata are
    updated accordingly. This method can not be applied if file scheme is
    simple.

    Parameter
    ---------
    rgs: row group or list of row groups
        List of row groups to be removed from disk.
    sort_pnames : bool, default False
        Align name of part files with position of the 1st row group they
        contain. Only used if `file_scheme` of parquet file is set to
        `hive` or `drill`.
    write_fmd: bool, True
        Write updated common metadata to disk.
    open_with: function
        When called with f(path, mode), returns an open file-like object.
    remove_with: function
        When called with f(path) removes the file or directory given
        (and any contained files). Not required if this ParquetFile has
        a .fs file system attribute
    """
    if not isinstance(rgs, list):
        if isinstance(rgs, ThriftObject) or isinstance(rgs, dict):
            # Case 'rgs' is a single row group ('ThriftObject' or 'dict').
            rgs = [rgs]
        else:
            # Use `list()` here, not `[]`, as the latter does not transform
            # generator or tuple into list but encapsulates them in a list.
            rgs = list(rgs)
    if rgs:
        if self.file_scheme == 'simple':
            raise ValueError("Not possible to remove row groups when file "
                             "scheme is 'simple'.")
        if remove_with is None:
            if hasattr(self, 'fs'):
                remove_with = self.fs.rm
            else:
                remove_with = default_remove
        # Map each data file to the row groups it contains.
        rgs_to_remove = row_groups_map(rgs)
        if (b"fastparquet" not in self.created_by
                or self.file_scheme == 'flat'):
            # Check if some files contain row groups both to be removed and
            # to be kept.
            all_rgs = row_groups_map(self.fmd.row_groups)
            for file in rgs_to_remove:
                if len(rgs_to_remove[file]) < len(all_rgs[file]):
                    raise ValueError(
                        f"File {file} contains row groups both to be kept "
                        "and to be removed. Removing row groups partially "
                        "from a file is not possible.")
        if rgs != self.fmd.row_groups:
            rg_new = self.fmd.row_groups
        else:
            # Deep copy required if 'rg_new' and 'rgs' points both to
            # 'self.fmd.row_groups'.
            from copy import deepcopy
            rg_new = deepcopy(self.fmd.row_groups)
        for rg in rgs:
            rg_new.remove(rg)
            self.fmd.num_rows -= rg.num_rows
        self.fmd.row_groups = rg_new
        try:
            basepath = self.basepath
            remove_with([f'{basepath}/{file}' for file in rgs_to_remove])
        except IOError:
            # Best-effort delete: metadata has already been updated.
            pass
        self._set_attrs()
    if sort_pnames:
        self._sort_part_names(False, open_with)
    if write_fmd:
        self._write_common_metadata(open_with)
def write_row_groups(self, data, row_group_offsets=None, sort_key=None,
                     sort_pnames:bool=False, compression=None,
                     write_fmd:bool=True, open_with=default_open,
                     mkdirs=None, stats="auto"):
    """Write data as new row groups to disk, with optional sorting.

    Parameters
    ----------
    data : pandas dataframe or iterable of pandas dataframe
        Data to add to existing parquet dataset. Only columns are written
        to disk. Row index is not kept.
        If a dataframe, columns are checked against parquet file schema.
    row_group_offsets: int or list of int
        If int, row-groups will be approximately this many rows, rounded down
        to make row groups about the same size;
        If a list, the explicit index values to start new row groups;
        If `None`, set to 50_000_000.
    sort_key : function, default None
        Sorting function used as `key` parameter for `row_groups.sort()`
        function. This function is called once new row groups have been
        added to list of existing ones.
        If not provided, new row groups are only appended to existing ones
        and the updated list of row groups is not sorted.
    sort_pnames : bool, default False
        Align name of part files with position of the 1st row group they
        contain. Only used if `file_scheme` of parquet file is set to
        `hive` or `drill`.
    compression : str or dict, default None
        Compression to apply to each column, e.g. ``GZIP`` or ``SNAPPY`` or
        a ``dict`` like ``{"col1": "SNAPPY", "col2": None}`` to specify per
        column compression types.
        By default, do not compress.
        Please, review full description of this parameter in `write`
        docstring.
    write_fmd : bool, default True
        Write updated common metadata to disk.
    open_with : function
        When called with a f(path, mode), returns an open file-like object.
    mkdirs : function
        When called with a path/URL, creates any necessary directories to
        make that location writable, e.g., ``os.makedirs``. This is not
        necessary if using the simple file scheme.
    stats : True|False|list of str
        Whether to calculate and write summary statistics.
        If True (default), do it for every column;
        If False, never do;
        If a list of str, do it only for those specified columns.
        "auto" means True for any int/float or timestamp column, False
        otherwise. This will become the default in a future release.
    """
    from .writer import write_simple, write_multi
    partition_on = list(self.cats)
    if isinstance(data, pd.DataFrame):
        # Validate that new data has exactly the existing columns.
        self_cols = sorted(self.columns + partition_on)
        if self_cols != sorted(data.columns):
            diff_cols = set(data.columns) ^ set(self_cols)
            raise ValueError(
                f'Column names of new data are {sorted(data.columns)}. '
                f'But column names in existing file are {self_cols}. '
                f'{diff_cols} are columns being either only in existing '
                'file or only in new data. This is not possible.')
    if (self.file_scheme == 'simple'
            or (self.file_scheme == 'empty' and self.fn[-9:] != '_metadata')):
        # Case 'simple'.
        write_simple(self.fn, data, self.fmd,
                     row_group_offsets=row_group_offsets,
                     compression=compression, open_with=open_with,
                     has_nulls=None, append=True, stats=stats)
    else:
        # Case 'hive' or 'drill'.
        write_multi(self.basepath, data, self.fmd,
                    row_group_offsets=row_group_offsets,
                    compression=compression, file_scheme=self.file_scheme,
                    write_fmd=False, open_with=open_with, mkdirs=mkdirs,
                    partition_on=partition_on, append=True, stats=stats)
        if sort_key:
            # Not using 'sort()' because 'row_groups' is a ThriftObject,
            # not a list.
            self.fmd.row_groups = sorted(self.fmd.row_groups, key=sort_key)
        if sort_pnames:
            self._sort_part_names(False, open_with)
        if write_fmd:
            self._write_common_metadata(open_with)
    self._set_attrs()
def _sort_part_names(self, write_fmd:bool=True, open_with=default_open):
    """Align parquet files id to that of the first row group they contain.

    This method only manages files which name follows pattern
    "part.{id}.parquet". Field `id` is then aligned to the index of the
    first row group it contains. The index of a row groups is its position
    in row group list.

    Parameters
    ----------
    write_fmd : bool, default True
        Write updated common metadata to disk.
    open_with : function
        When called with a f(path, mode), returns an open file-like object.
        Only needed if `write_fmd` is `True`.
    """
    from .writer import part_ids
    pids = part_ids(self.fmd.row_groups)
    if pids:
        # Keep only items for which row group position does not match part
        # name id.
        pids = dict(filter(lambda item: item[0] != item[1][0],
                           pids.items()))
        basepath = self.basepath
        # Give temporary names in a 1st pass to prevent overwritings.
        for pid in pids:
            item = pids[pid]
            rgid, fname = item[0], item[1]
            src = f'{basepath}/{fname}'
            parts = partitions(fname)
            dst = join_path(basepath, parts, f'part.{rgid}.parquet.tmp')
            self.fs.rename(src, dst)
        # Give definitive names in a 2nd pass.
        for pid in pids:
            item = pids[pid]
            rgid, fname = item[0], item[1]
            parts = partitions(fname)
            src = join_path(basepath, parts, f'part.{rgid}.parquet.tmp')
            dst_part = join_path(parts, f'part.{rgid}.parquet')
            dst = join_path(basepath, dst_part)
            self.fs.rename(src, dst)
            # Update chunk paths in metadata to the new file name.
            for col in self.fmd.row_groups[rgid].columns:
                col.file_path = dst_part
    if write_fmd:
        self._write_common_metadata(open_with)
def _write_common_metadata(self, open_with=default_open):
    """
    Write common metadata to disk.

    Parameter
    ---------
    open_with: function
        When called with a f(path, mode), returns an open file-like object.
    """
    from .writer import write_common_metadata
    if self.file_scheme == 'simple':
        raise ValueError("Not possible to write common metadata when file \
scheme is 'simple'.")
    fmd = self.fmd
    # "_metadata" keeps the row-group info; "_common_metadata" omits it.
    write_common_metadata(self.fn, fmd, open_with, no_row_groups=False)
    # replace '_metadata' with '_common_metadata'
    fn = f'{self.fn[:-9]}_common_metadata'
    write_common_metadata(fn, fmd, open_with)
def _get_index(self, index=None):
    """Resolve index column names, defaulting to pandas metadata."""
    if index is None:
        # Keep only non-range index entries recorded by pandas.
        index = []
        for entry in self.pandas_metadata.get('index_columns', []):
            if isinstance(entry, str):
                index.append(entry)
            elif entry.get("kind") != "range":
                index.append(entry["name"])
    if isinstance(index, str):
        index = [index]
    return index
def _columns_from_filters(self, filters):
    """Unique non-partition column names referenced by ``filters``."""
    names = set()
    for clause in filters:
        if isinstance(clause[0], str):
            # A bare (name, op, val) tuple.
            names.add(clause[0])
        else:
            # A list of tuples (AND-group).
            names.update(cond[0] for cond in clause)
    return [c for c in names if c not in self.cats]
def _column_filter(self, df, filters):
    """Build a boolean row mask for ``df`` from to_pandas-style filters.

    Outer list entries are OR-ed together; tuples inside an inner list
    are AND-ed. Partition columns (self.cats) are skipped here, since
    they are filtered at the row-group level.
    """
    out = np.zeros(len(df), dtype=bool)
    for or_part in filters:
        if isinstance(or_part[0], str):
            # Single (name, op, val) tuple at the top level.
            name, op, val = or_part
            if name in self.cats:
                continue
            if op == 'in':
                out |= df[name].isin(val).values
            elif op == "not in":
                out |= ~df[name].isin(val).values
            elif op in ops:
                out |= ops[op](df[name], val).values
            elif op == "~":
                out |= ~df[name].values
        else:
            # Conjunction of conditions.
            and_part = np.ones(len(df), dtype=bool)
            for name, op, val in or_part:
                if name in self.cats:
                    continue
                if op == 'in':
                    and_part &= df[name].isin(val).values
                elif op == "not in":
                    and_part &= ~df[name].isin(val).values
                elif op in ops:
                    and_part &= ops[op](df[name].values, val)
                elif op == "~":
                    and_part &= ~df[name].values
            out |= and_part
    return out
def to_pandas(self, columns=None, categories=None, filters=[],
              index=None, row_filter=False, dtypes=None):
    """
    Read data from parquet into a Pandas dataframe.

    Parameters
    ----------
    columns: list of names or `None`
        Column to load (see `ParquetFile.columns`). Any columns in the
        data not in this list will be ignored. If `None`, read all columns.
    categories: list, dict or `None`
        If a column is encoded using dictionary encoding in every row-group
        and its name is also in this list, it will generate a Pandas
        Category-type column, potentially saving memory and time. If a
        dict {col: int}, the value indicates the number of categories,
        so that the optimal data-dtype can be allocated. If ``None``,
        will automatically set *if* the data was written from pandas.
    filters: list of list of tuples or list of tuples
        To filter out data.
        Filter syntax: [[(column, op, val), ...],...]
        where op is [==, =, >, >=, <, <=, !=, in, not in]
        The innermost tuples are transposed into a set of filters applied
        through an `AND` operation.
        The outer list combines these sets of filters through an `OR`
        operation.
        A single list of tuples can also be used, meaning that no `OR`
        operation between set of filters is to be conducted.
    index: string or list of strings or False or None
        Column(s) to assign to the (multi-)index. If None, index is
        inferred from the metadata (if this was originally pandas data); if
        the metadata does not exist or index is False, index is simple
        sequential integers.
    row_filter: bool or boolean ndarray
        Whether filters are applied to whole row-groups (False, default)
        or row-wise (True, experimental). The latter requires two passes of
        any row group that may contain valid rows, but can be much more
        memory-efficient, especially if the filter columns are not required
        in the output.
        If boolean array, it is applied as custom row filter. In this case,
        'filter' parameter is ignored, and length of the array has to be
        equal to the total number of rows.

    Returns
    -------
    Pandas data-frame
    """
    # NOTE(review): the mutable default ``filters=[]`` is never mutated
    # here, so it is harmless, but ``None`` would be conventional.
    rgs = filter_row_groups(self, filters) if filters else self.row_groups
    index = self._get_index(index)
    if columns is not None:
        columns = columns[:]
    else:
        columns = self.columns + list(self.cats)
    if index:
        columns += [i for i in index if i not in columns]
    check_column_names(self.columns + list(self.cats), columns, categories)
    if row_filter is not False:
        if filters and row_filter is True:
            # Rows are selected as per filters.
            # TODO: special case when filter columns are also in output
            cs = self._columns_from_filters(filters)
            df = self.to_pandas(columns=cs, filters=filters, row_filter=False,
                                index=False)
            sel = self._column_filter(df, filters=filters)
        else:
            # Row are selected as per custom 'sel'.
            if sum(rg.num_rows for rg in rgs) != len(row_filter):
                raise ValueError('Provided boolean array for custom row \
selection does not match number of rows in DataFrame.')
            sel = row_filter
        size = sel.sum()
        selected = []
        start = 0
        # Split the global mask into one slice per row group.
        for rg in rgs[:]:
            selected.append(sel[start:start+rg.num_rows])
            start += rg.num_rows
    else:
        size = sum(rg.num_rows for rg in rgs)
        selected = [None] * len(rgs)  # just to fill zip, below
    df, views = self.pre_allocate(size, columns, categories, index, dtypes=dtypes)
    if "PANDAS_ATTRS" in self.key_value_metadata:
        import json
        df.attrs = json.loads(self.key_value_metadata["PANDAS_ATTRS"])
    start = 0
    if self.file_scheme == 'simple':
        infile = self.open(self.fn, 'rb')
    else:
        infile = None
    for rg, sel in zip(rgs, selected):
        thislen = sel.sum() if sel is not None else rg.num_rows
        if thislen == rg.num_rows:
            # all good; noop if no row filtering
            sel = None
        elif thislen == 0:
            # no valid rows
            continue
        # Views into the pre-allocated output arrays for this row group.
        parts = {name: (v if name.endswith('-catdef')
                        else v[start:start + thislen])
                 for (name, v) in views.items()}
        self.read_row_group_file(rg, columns, categories, index,
                                 assign=parts, partition_meta=self.partition_meta,
                                 row_filter=sel, infile=infile)
        start += thislen
    return df
def pre_allocate(self, size, columns, categories, index, dtypes=None):
    """Create an empty dataframe of ``size`` rows to be filled in place.

    Returns the dataframe together with a dict of writable views
    (name -> underlying array) that the row-group readers fill.
    """
    if dtypes is not None:
        # Caller-supplied dtypes override inference and define the columns.
        columns = list(dtypes)
    else:
        dtypes = self._dtypes(categories)
    categories = self.check_categories(categories)
    cats = {k: v for k, v in self.cats.items() if k in columns}
    df, arrs = _pre_allocate(size, columns, categories, index, cats,
                             dtypes, self.tz, columns_dtype=self._columns_dtype)
    # Pandas names unnamed index levels "__index_level_N__".
    i_no_name = re.compile(r"__index_level_\d+__")
    if self.has_pandas_metadata:
        md = self.pandas_metadata
        if categories:
            for c in md['columns']:
                if c['name'] in categories and c['name'] in df and c['metadata']:
                    # Restore the 'ordered' flag of categorical dtypes.
                    df[c['name']].dtype._ordered = c['metadata']['ordered']
        if md.get('index_columns', False) and not (index or index is False):
            if len(md['index_columns']) == 1:
                ic = md['index_columns'][0]
                if isinstance(ic, dict) and ic.get('kind') == 'range':
                    # Rebuild a RangeIndex from the stored start/step.
                    from pandas import RangeIndex
                    df.index = RangeIndex(
                        start=ic['start'],
                        stop=ic['start'] + size * ic['step'] + 1,
                        step=ic['step']
                    )[:size]
            names = [(c['name'] if isinstance(c, dict) else c)
                     for c in md['index_columns']]
            names = [None if n is None or i_no_name.match(n) else n
                     for n in names]
            df.index.names = names
        if md.get('column_indexes', False):
            names = [(c['name'] if isinstance(c, dict) else c)
                     for c in md['column_indexes']]
            names = [None if n is None or i_no_name.match(n) else n
                     for n in names]
            if len(names) > 1:
                # Column labels were tuples serialized as strings.
                df.columns = pd.MultiIndex.from_tuples(
                    [ast.literal_eval(c) for c in df.columns if c not in df.index.names],
                    names=names
                )
            else:
                df.columns.names = names
    return df, arrs
def count(self, filters=None, row_filter=False):
    """Return the total number of rows.

    ``filters`` and ``row_filter`` carry the same meaning as in
    ``to_pandas``. Unless both are supplied, only row-group metadata is
    consulted and no data pages are decoded.
    """
    if not row_filter:
        # metadata-only path: add up the sizes of the surviving row groups
        total = 0
        for group in filter_row_groups(self, filters):
            total += group.num_rows
        return total
    # exact path: load just the filter columns and count matching rows
    needed = self._columns_from_filters(filters)
    frame = self.to_pandas(columns=needed, filters=filters,
                           row_filter=False, index=False)
    return self._column_filter(frame, filters=filters).sum()
@property
def info(self):
    """Summary dict: name, columns, partitions, total rows, row-group count."""
    summary = dict(
        name=self.fn,
        columns=self.columns,
        partitions=list(self.cats),
        rows=self.count(),
    )
    summary["row_groups"] = len(self.row_groups)
    return summary
def check_categories(self, cats):
    """Reconcile requested categorical columns with stored metadata.

    ``cats`` may be None, a list of column names, or a dict mapping name
    to category count. Returns a dict mapping column name to category
    count (or a RangeIndex placeholder for columns without stored
    counts). Raises TypeError when a multi-row-group file is asked to
    read a non-categorical column as category.
    """
    stored = self.categories
    if not self.has_pandas_metadata:
        return cats or {}
    if cats is None:
        return stored or {}
    unknown = set(cats) - set(stored)
    if unknown and len(self.row_groups) > 1:
        raise TypeError("Attempt to read as category a field that "
                        "was not stored as such")
    if isinstance(cats, dict):
        return cats
    result = {name: count for name, count in stored.items() if name in cats}
    for name in cats:
        if name not in stored:
            # no stored count: reserve a generous default code range
            result[name] = pd.RangeIndex(0, 2**14)
    return result
@property
def has_pandas_metadata(self):
    """True when pandas metadata was already parsed, or the file's
    key/value store carries a 'pandas' entry."""
    already = bool(self._pdm)
    if not already and self.fmd.key_value_metadata is not None:
        already = bool(self.key_value_metadata.get('pandas', False))
    return already
@property
def pandas_metadata(self):
    """Parsed pandas metadata dict (empty when none stored); cached in _pdm."""
    if self._pdm is not None:
        return self._pdm
    if self.has_pandas_metadata:
        decoded = json_decoder()(self.key_value_metadata['pandas'])
    else:
        decoded = {}
    self._pdm = decoded
    return decoded
@property
def categories(self):
    """Mapping of column name -> stored category count for columns to be
    read as pandas categoricals; empty dict when there are none.

    Derived from pandas metadata when available — for files not written
    by fastparquet, every data page must have been dictionary-encoded —
    otherwise from the legacy "fastparquet.cats" key/value entry. The
    result is cached in ``self._categories``.
    """
    if self._categories is not None:
        return self._categories
    if self.has_pandas_metadata:
        metadata = self.pandas_metadata
        # remember the dtype of the column index itself (used elsewhere
        # when reconstructing the columns axis)
        if "column_indexes" in metadata and len(metadata["column_indexes"]) > 0:
            self._columns_dtype = metadata["column_indexes"][0]["numpy_type"]
        else:
            self._columns_dtype = None
        cats = {}
        for m in metadata['columns']:
            if m['pandas_type'] != 'categorical':
                continue
            # out flips to True if any data page is NOT dict-encoded
            out = False
            if b"fastparquet" in self.created_by:
                # if pandas was categorical, we will have used dict encoding
                cats[m['name']] = m['metadata']['num_categories']
                continue
            for rg in self.row_groups:
                # but others (pyarrow) may have used dict for only some pages
                if out:
                    break
                for col in rg.columns:
                    if ".".join(col.meta_data.path_in_schema) != m['name']:
                        continue
                    if col.meta_data.encoding_stats:
                        if any(s.encoding not in [parquet_thrift.Encoding.PLAIN_DICTIONARY,
                                                  parquet_thrift.Encoding.RLE_DICTIONARY]
                               for s in col.meta_data.encoding_stats
                               if s.page_type in [parquet_thrift.PageType.DATA_PAGE_V2,
                                                  parquet_thrift.PageType.DATA_PAGE]):
                            out = True
                            break
            if out is False:
                cats[m['name']] = m['metadata']['num_categories']
        self._categories = cats
        return cats
    # old track: pre-pandas-metadata files stored their own JSON entry
    vals = self.key_value_metadata.get("fastparquet.cats", None)
    if vals:
        self._categories = json_decoder()(vals)
        return self._categories
    else:
        # NOTE(review): result deliberately not cached in this branch
        return {}
def _dtypes(self, categories=None):
    """Implied types of the columns in the schema.

    Builds (and caches in ``self._base_dtype``) the mapping of column
    name -> numpy/pandas dtype, using pandas metadata where available to
    recover timezones, datetime resolution and nullable extension types;
    then overlays any requested or partition-derived categoricals. Also
    sets ``self.tz`` and ``self.dtypes`` as side effects.

    Fix: ``np.float_`` was removed in NumPy 2.0; ``np.float64`` is its
    exact equivalent and works on all NumPy versions.
    """
    import pandas as pd
    if self._base_dtype is None:
        if self.has_pandas_metadata:
            md = self.pandas_metadata['columns']
            md = {c['name']: c for c in md}
            # per-column timezone strings for tz-aware datetime columns
            tz = {k: v["metadata"]['timezone'] for k, v in md.items()
                  if v.get('metadata', {}) and v.get('metadata', {}).get('timezone', None)}
        else:
            tz = None
            md = None
        self.tz = tz
        # nested columns (num_children > 0) are read as object dtype
        dtype = OrderedDict((name, (converted_types.typemap(f, md=md)
                            if f.num_children in [None, 0] else np.dtype("O")))
                            for name, f in self.schema.root["children"].items()
                            if getattr(f, 'isflat', False) is False)
        for i, (col, dt) in enumerate(dtype.copy().items()):
            # int and bool columns produce masked pandas types, no need to
            # promote types here
            if dt.kind == "M":
                if self.pandas_metadata and PANDAS_VERSION.major >= 2:
                    # get original resolution when pandas supports non-ns
                    dt = md[col]["numpy_type"]
                if tz is not None and tz.get(col, False):
                    z = dataframe.tz_to_dt_tz(tz[col])
                    dt_series = pd.Series([], dtype=dt)
                    if PANDAS_VERSION.major >= 2 and dt_series.dt.tz is not None:
                        dt = dt_series.dt.tz_convert(z).dtype
                    else:
                        dt = dt_series.dt.tz_localize(z).dtype
                dtype[col] = dt
            elif dt in converted_types.nullable:
                if self.pandas_metadata:
                    tt = md.get(col, {}).get("numpy_type")
                    if tt and ("int" in tt or "bool" in tt):
                        # pandas metadata gives the original type: skip
                        continue
                # uint/int/bool columns that may have nulls become nullable
                num_nulls = 0
                for rg in self.row_groups:
                    # thrift field 3 on a row group is num_rows; skip empty
                    if rg[3] == 0:
                        continue
                    # rg[1][i] is the i-th column chunk, [3] its metadata,
                    # .get(12) the statistics struct (thrift field ids)
                    st = rg[1][i][3].get(12)
                    if st is None:
                        # no statistics at all: must assume nulls possible
                        num_nulls = True
                        break
                    if st.get(3):
                        # statistics field 3 (null_count) is non-zero
                        num_nulls = True
                        break
                if num_nulls:
                    if self.pandas_nulls:
                        dtype[col] = converted_types.nullable[dt]
                    else:
                        # np.float_ was removed in NumPy 2.0; float64 is
                        # the exact equivalent
                        dtype[col] = np.float64()
            elif dt == 'S12':
                # fixed 12-byte values: presumably INT96 timestamps,
                # read as nanosecond datetimes
                dtype[col] = 'M8[ns]'
        self._base_dtype = dtype
    dtype = self._base_dtype.copy()
    categories = self.check_categories(categories)
    for field in categories:
        dtype[field] = 'category'
    for cat in self.cats:
        dtype[cat] = "category"
    self.dtypes = dtype
    return dtype
def __getstate__(self):
    """Minimal picklable state; derived attributes are rebuilt by
    ``__setstate__`` via ``_set_attrs``."""
    if self.fmd.row_groups is None:
        # normalize so restored instances can iterate row_groups safely
        self.fmd.row_groups = []
    return {"fn": self.fn, "open": self.open, "fmd": self.fmd,
            "pandas_nulls": self.pandas_nulls, "_base_dtype": self._base_dtype,
            "tz": self.tz}
def __setstate__(self, state):
    """Restore pickled/copied state and re-derive cached attributes."""
    self.__dict__.update(state)
    # Decode 'file_path'. fmd[4] is the thrift row_groups list (field 4);
    # a copy may have left the first chunk's file_path as raw bytes.
    rgs = self.fmd[4] or []
    # 4th condition should not be necessary, depends on 'deepcopy' version.
    # https://github.com/dask/fastparquet/pull/731#issuecomment-1013507287
    if (rgs and rgs[0][1] and rgs[0][1][0] and rgs[0][1][0].get(1)
            and isinstance(rgs[0][1][0].get(1), bytes)):
        # for rg in fmd.row_groups:
        for rg in rgs:
            # chunk = rg.columns[0]; only the first chunk is decoded here
            chunk = rg[1][0]
            # chunk.file_path = chunk.file_path.decode()
            chunk[1] = chunk.get(1).decode()
    self._set_attrs()
def __str__(self):
    """Render as ``<Parquet File: {info}>`` for interactive display."""
    return "<Parquet File: {}>".format(self.info)

__repr__ = __str__
|
(fn, verify=False, open_with=<built-in function open>, root=False, sep=None, fs=None, pandas_nulls=True, dtypes=None)
|
69,718 |
fastparquet.api
|
__bool__
|
Return True, takes precedence over `__len__`.
|
def __bool__(self):
    """Always truthy; overrides any ``__len__``-based truth testing."""
    return True
|
(self)
|
69,719 |
fastparquet.api
|
__getitem__
|
Select among the row-groups using integer/slicing
|
def __getitem__(self, item):
    """Select among the row-groups using integer/slicing.

    Returns a new ParquetFile sharing this one's metadata but restricted
    to the chosen row-group(s); no file data is re-read.
    """
    import copy
    new_rgs = self.row_groups[item]
    if not isinstance(new_rgs, list):
        # single integer index selects one row group; wrap in a list
        new_rgs = [new_rgs]
    # build without __init__ to avoid re-parsing the footer
    new_pf = object.__new__(ParquetFile)
    # shallow copy: row_groups replaced, other metadata shared
    fmd = copy.copy(self.fmd)
    fmd.row_groups = new_rgs
    new_pf.__setstate__(
        {"fn": self.fn, "open": self.open, "fmd": fmd,
         "pandas_nulls": self.pandas_nulls, "_base_dtype": self._base_dtype,
         "tz": self.tz, "_columns_dtype": self._columns_dtype}
    )
    # NOTE(review): __setstate__ already calls _set_attrs; this repeat
    # looks redundant but is harmless
    new_pf._set_attrs()
    return new_pf
|
(self, item)
|
69,720 |
fastparquet.api
|
__getstate__
| null |
def __getstate__(self):
    """Minimal picklable state; derived attributes are rebuilt by
    ``__setstate__`` via ``_set_attrs``."""
    if self.fmd.row_groups is None:
        # normalize so restored instances can iterate row_groups safely
        self.fmd.row_groups = []
    return {"fn": self.fn, "open": self.open, "fmd": self.fmd,
            "pandas_nulls": self.pandas_nulls, "_base_dtype": self._base_dtype,
            "tz": self.tz}
|
(self)
|
69,721 |
fastparquet.api
|
__init__
| null |
def __init__(self, fn, verify=False, open_with=default_open, root=False,
             sep=None, fs=None, pandas_nulls=True, dtypes=None):
    """Open parquet data from a path, glob, directory, file list, or
    open file-like object.

    :param fn: path/glob/directory string, list of data files, or an
        open binary file-like object
    :param verify: whether to check footer bytes while parsing
    :param open_with: callable producing a binary file-like from a path
    :param root: dataset root path, used for partition inference
    :param sep: unused; kept for backward compatibility
    :param fs: fsspec-like filesystem; derived from ``open_with``/``fn``
        when not given
    :param pandas_nulls: read null-bearing int/bool columns as nullable
        pandas extension types instead of floats
    :param dtypes: pre-computed dtype mapping overriding the schema

    Fix: the "Opening directories without a _metadata" error message was
    missing a space between the implicitly concatenated string literals
    ("...requiresa filesystem...").
    """
    self.pandas_nulls = pandas_nulls
    self._base_dtype = dtypes
    self.tz = None
    self._columns_dtype = None
    # settle on a filesystem/opener pair
    if open_with is default_open and fs is None:
        fs = fsspec.filesystem("file")
    elif fs is not None:
        open_with = fs.open
    else:
        # opener may be a bound fs.open; recover the filesystem from it
        fs = getattr(open_with, "__self__", None)
        if fs is None:
            fs, fn, open_with, mkdirs = get_fs(fn, open_with, None)
    if isinstance(fn, (tuple, list)):
        # explicit list of data files: merge their footers in memory
        if root and fs is not None:
            root = fs._strip_protocol(root)
        basepath, fmd = metadata_from_many(fn, verify_schema=verify,
                                           open_with=open_with, root=root,
                                           fs=fs)
        writer.consolidate_categories(fmd)
        self.fn = join_path(
            basepath, '_metadata') if basepath else '_metadata'
        self.fmd = fmd
        self._set_attrs()
    elif hasattr(fn, 'read'):
        # file-like
        self.fn = None
        self._parse_header(fn, verify)
        if self.file_scheme not in ['simple', 'empty']:
            raise ValueError('Cannot use file-like input '
                             'with multi-file data')
        # any subsequent "open" just hands back the same file object
        open_with = lambda *args, **kwargs: fn
    elif isinstance(fs, fsspec.AbstractFileSystem):
        if fs.isfile(fn):
            # single data file
            self.fn = join_path(fn)
            with open_with(fn, 'rb') as f:
                self._parse_header(f, verify)
            if root:
                # infer partition fields from the path below root
                paths = [fn.replace(root, "")]
                self.file_scheme, self.cats = paths_to_cats(paths, None)
        elif "*" in fn or fs.isdir(fn):
            # directory or glob: prefer a consolidated _metadata file
            fn2 = join_path(fn, '_metadata')
            if fs.exists(fn2):
                self.fn = fn2
                with open_with(fn2, 'rb') as f:
                    self._parse_header(f, verify)
                fn = fn2
            else:
                # TODO: get details from fs here, rather than do suffix cat in
                # metadata_from_many
                if "*" in fn:
                    allfiles = fs.glob(fn)
                else:
                    allfiles = [f for f in fs.find(fn) if
                                f.endswith(".parquet") or f.endswith(".parq")]
                root = root or fn
                if not allfiles:
                    raise ValueError("No files in dir")
                if root:
                    root = fs._strip_protocol(root)
                basepath, fmd = metadata_from_many(allfiles, verify_schema=verify,
                                                   open_with=open_with, root=root,
                                                   fs=fs)
                writer.consolidate_categories(fmd)
                self.fn = join_path(basepath, '_metadata') if basepath \
                    else '_metadata'
                self.fmd = fmd
                self._set_attrs()
            self.fs = fs
        else:
            raise FileNotFoundError(fn)
    else:
        # no usable filesystem: try opening directly, then as a directory
        done = False
        try:
            self.fn = fn
            f = open_with(fn)
            self._parse_header(f, verify)
            done = True
        except IOError:
            pass
        if not done:
            # allow this to error with FileNotFound or whatever
            try:
                self.fn = join_path(fn, "_metadata")
                f = open_with(self.fn)
                self._parse_header(f, verify)
            except IOError as e:
                raise ValueError("Opening directories without a _metadata requires "
                                 "a filesystem compatible with fsspec") from e
    self.open = open_with
    self._statistics = None
|
(self, fn, verify=False, open_with=<built-in function open>, root=False, sep=None, fs=None, pandas_nulls=True, dtypes=None)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.