file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-12.1k) | suffix (large_string, lengths 0-12k) | middle (large_string, lengths 0-7.51k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
marketplace.js |
return formatted;
};
var searchReports = function(search, callback) {
$.ajax({
url: './handlers/MarketplaceReportSearchHandler.ashx',
data: { language: languageId, instance: instanceId, search: JSON.stringify(search) },
success: function (d) {
if (callback) callback(d);
},
dataType: 'json',
type: 'POST'
});
}
var getStandardResults = function (reportId, standardId, profileId, callback) {
$.ajax({
url: './handlers/MarketplaceSeriesDataHandler.ashx',
data: { report: reportId, standard: standardId, profile: profileId },
success: function (d) {
if (callback) callback(d);
},
dataType: 'json',
type: 'POST'
});
}
var getCompanyDetails = function (companyId, profileId, languageId, callback) {
$.ajax({
url: './handlers/MarketplaceCompanyDetailsHandler.ashx',
data: { company: companyId, profile: profileId, language: languageId },
success: function (d) {
if (callback) callback(d);
},
dataType: 'json',
type: 'POST'
});
}
$(function () {
$('.form-group select').selectize({ clearOnOpen: true }).on('change', filterSearch);
$('#CompanyName').on('change', filterSearch);
$('#Filters input.percentslider')
.slider({tooltip: 'show',selection: 'after', orientation: 'horizontal', step: 5, min: -1, max: 100, value: -1})
.on('slide', function(ev){ $(ev.currentTarget).closest('.percentageFilter').find('span.percentage').text(-1 < ev.value ? ev.value + '%' : 'na');})
.on('slideStop', filterSearch)
.closest('.percentageFilter')
.append('<span class="percentage">na</span>');
filterSearch();
});
function max(a, b)
{
if (!a || !isNumber(a)) return b;
if (!b || !isNumber(b)) return a;
return Math.max(a, b);
}
function isNumber(n) {
return !isNaN(parseFloat(n)) && isFinite(n);
}
function filterSearch() {
var fs = {};
fs.CompanyName = '';
fs.Products = [];
fs.ProductCategories = []
fs.ProducingCountries = [];
fs.ProducingRegions = [];
fs.StandardsCertified = []
fs.StandardsAssessed = [];
fs.Compliance = {};
fs.Compliance.Overall = max(-1, $('#overallPercentage').val());
fs.Compliance.Environment = max(-1, $('#environmentPercentage').val());
fs.Compliance.Social = max(-1, $('#sociallPercentage').val());
fs.Compliance.Economic = max(-1, $('#economicPercentage').val());
fs.Compliance.Quality = max(-1, $('#qualityPercentage').val());
fs.Compliance.BusinessIntegrity = max(-1, $('#businessIntegrityPercentage').val());
var sd;
sd = $('#CompanyName').val();
if (sd && 0 < sd.length) {
fs.CompanyName = sd;
}
sd = $('#DropDownProduct').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
var p = o.split(':')[2];
if (p && 0 < p) fs.Products.push(p);
else fs.ProductCategories.push(o.split(':')[0]);
});
}
sd = $('#DropDownProducingCountry').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
var p = o.split(':')[1];
if (p && 0 < p) fs.ProducingCountries.push(p);
else fs.ProducingRegions.push(o.split(':')[0]);
});
}
sd = $('#StandardsCertifiedList').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
fs.StandardsCertified.push(o);
});
}
sd = $('#StandardsAssessedList').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
fs.StandardsAssessed.push(o);
});
}
searchReports(fs, buildResultsTable);
}
var buildResultsTable = function (d) {
var results_container = $('#ResultsTable tbody');
var standard_template = '<span class="standard" data-id="{0}" data-name="{1}" data-logolarge="{3}"><a href="marketplace.aspx#"><img src="http://search.standardsmap.org:8080/{2}" title="{1}" width="45" height="45" /></a></span>';
var row_template = '<tr class="report_result" data-id="{0}"><td class="company"><a href="marketplace.aspx#">{1}</a></td><td>{2}</td><td>{3}</td><td>{4}</td><td>{5}</td><td><a href="http://search.standardsmap.org:8080/reportfinal.aspx?report={0}">View</a></td></tr>';
var report_template = '<tr class="standard_result" data-id="{0}" id="{3}-{0}"><td colspan="6"><span class="standard_title"><img src="http://search.standardsmap.org:8080/{2}" width="90" height="90" /><h4>{1}</h4></span><span id="data-{3}-{0}"></span></td></tr>';
var company_template = '<tr class="company_result" data-id="{0}" id="{1}-{0}"><td colspan="6"><span id="company-{1}-{0}"></span></td></tr>';
results_container.empty();
if (d) {
var rpt_list = [];
d.forEach(function (r, i) {
rpt_list.push(r.ReportID);
var standards = '';
if (r.Standards) {
r.Standards.forEach(function (s, i) { standards += standard_template.format(s.Id, s.Name, s.LogoSmall, s.LogoLarge); });
}
var row = $(row_template.format(r.ReportID, r.CompanyName, r.Product, r.Country, standards, formatDate(fixJsonDate(r.Modified))));
row.find('td.company a').on('click', function (ev) {
ev.preventDefault();
var result = $(ev.currentTarget).closest('.report_result');
var cmp_row = results_container.find('#' + r.ReportID + '-' + r.CompanyID + '.company_result');
// does the row exist? if so toggle visibility
if (cmp_row.length) {
cmp_row.toggle();
}
else {
cmp_row = result.after(company_template.format(r.CompanyID, r.ReportID));
getCompanyDetails(r.CompanyID, 0, 1, function (d) {
showCompanyDetails(d, '#company-' + r.ReportID + '-' + r.CompanyID);
});
}
});
results_container.append(row);
});
results_container.find('.standard a').on('click', function (ev) {
ev.preventDefault();
var std = $(ev.currentTarget).closest('.standard');
var result = std.closest('.report_result');
var id = std.attr('data-id');
var name = std.attr('data-name');
var logo = std.attr('data-logolarge');
var rid = result.attr('data-id');
var std_row = results_container.find('#' + rid + '-' + id + '.standard_result');
// does the row exist? if so toggle visibility
if (std_row.length) {
std_row.toggle();
}
else {
var cmp_row = result.next('.company_result');
if (cmp_row.length) {
std_row = cmp_row.after(report_template.format(id, name, logo, rid));
}
else {
std_row = result.after(report_template.format(id, name, logo, rid));
}
getStandardResults(rid, id, 0, function (d) {
showStandardResults(d.Data, '#data-' + rid + '-' + id);
});
}
});
$('#ReportIDs').val(rpt_list.join(','));
}
}
var fixJsonDate = function (dt) {
var date = new Date(parseInt(dt.replace(/[^0-9 +]/g, '')));
return date;
}
var formatDate = function (d) {
var dd = d.getDate();
if ( dd < 10 ) dd = '0' + dd;
var mm = d.getMonth()+1;
if ( mm < 10 ) mm = '0' + mm;
var yy = d.getFullYear() % 100;
if ( yy < 10 ) yy = '0' + yy;
return dd+'/'+mm+'/'+yy;
}
var showCompanyDetails = function (d, el) {
var template = '<span class="standard_title"><img src="http://search.standardsmap.org:8080/{5}" width="90" height="90"><h4>{1}</h4></span><span class="company_details"><p class="lead">{2}</p><p>Products/Services: { | {
var regexp = new RegExp('\\{' + i + '\\}', 'gi');
formatted = formatted.replace(regexp, arguments[i]);
} | conditional_block |
|
marketplace.js | rid));
}
else {
std_row = result.after(report_template.format(id, name, logo, rid));
}
getStandardResults(rid, id, 0, function (d) {
showStandardResults(d.Data, '#data-' + rid + '-' + id);
});
}
});
$('#ReportIDs').val(rpt_list.join(','));
}
}
var fixJsonDate = function (dt) {
var date = new Date(parseInt(dt.replace(/[^0-9 +]/g, '')));
return date;
}
var formatDate = function (d) {
var dd = d.getDate();
if ( dd < 10 ) dd = '0' + dd;
var mm = d.getMonth()+1;
if ( mm < 10 ) mm = '0' + mm;
var yy = d.getFullYear() % 100;
if ( yy < 10 ) yy = '0' + yy;
return dd+'/'+mm+'/'+yy;
}
var showCompanyDetails = function (d, el) {
var template = '<span class="standard_title"><img src="http://search.standardsmap.org:8080/{5}" width="90" height="90"><h4>{1}</h4></span><span class="company_details"><p class="lead">{2}</p><p>Products/Services: {6}<br />Destination markets: {7}</p><p>Contact: <a href="mailto:{4}">{4}</a><br />Website: <a href="http://search.standardsmap.org:8080/{3}" target="_blank">{3}</a></p></span>{8}';
var standards = '';
var standard_template = '<span class="standard" data-id="{0}" data-name="{1}" data-logolarge="{3}"><img src="http://search.standardsmap.org:8080/{2}" title="{1}" alt="{1}" width="45" height="45" /></span>';
if (d.Standards) {
standards = '<span class="standards_certified">';
d.Standards.forEach(function (s, i) { standards += standard_template.format(s.Id, s.Name, s.LogoSmall, s.LogoLarge); });
standards += '</span>';
}
$(el).append(template.format(d.Id, d.Name, d.Description, d.Website, d.ContactEmail, d.Logo, d.Products, d.DestinationMarkets, standards));
}
var showStandardResults = function (d, el) {
//var overview_template = '<table class="table overview_table"><thead><tr><th>Overall</th><th>Environment</th><th>Social</th><th>Economic</th><th>Quality management</th><th>Ethics</th></tr></thead><tbody><tr><td>{0}</td><td>{1}</td><td>{2}</td><td>{3}</td><td>{4}</td><td>{5}</td></tr></tbody></table>';
var overview_template = '<table class="overview_table"><caption>Latest compliance assessment {6}</caption><thead><tr class="hotspot-background-colors"><th class=""><i>∑</i>Overall</th><th class="hotspot-environment "><i class="icon-sm-environment"></i> Environment</th><th class="hotspot-social "><i class="icon-sm-social"></i> Social</th><th class="hotspot-economic "><i class="icon-sm-economic"></i> Economic</th><th class="hotspot-quality-management"><i class="icon-sm-quality"></i> Quality</th><th class="hotspot-ethics"><i class="icon-sm-ethics"></i> Ethics</th></tr></thead><tfoot><tr><td colspan="6">Met: {7} Nearly: {8} Missed: {9} Total: {10}</td></tr></tfoot><tbody><tr class="hotspot-background-colors"><td>{0}%</td><td class="hotspot-environment ">{1}%</td><td class="hotspot-social ">{2}%</td><td class="hotspot-economic ">{3}%</td><td class="hotspot-quality-management">{4}%</td><td class="hotspot-ethics">{5}%</td></tr></tbody></table>';
var nd = d[d.length - 1];
var history_template = '<table class="table history_table"><thead><tr><th>Date</th><th>Overall</th><th>Environment</th><th>Social</th><th>Economic</th><th>Quality</th><th>Ethics</th></tr></thead><tbody>{0}</tbody></table>';
var history_row_template = '<tr><td>{0}</td><td>{1}</td><td>{2}</td><td>{3}</td><td>{4}</td><td>{5}</td><td>{6}</td></tr>';
var html = '';
var seriesOv = { name: 'Overall', data: [] };
var seriesEn = { name: 'Environmental', data: [] };
var seriesSc = { name: 'Social', data: [] };
var seriesEc = { name: 'Economic', data: [] };
var seriesQm = { name: 'Quality', data: [] };
var seriesEt = { name: 'Ethics', data: [] };
for (var i = d.length - 1; i > -1; --i) {
var date = fixJsonDate(d[i].DateCreated);
var datestring = formatDate(date);
var datadate = Date.UTC(date.getFullYear(), date.getMonth(), date.getDate(), date.getHours(), date.getMinutes());
html += history_row_template.format(datestring, d[i].Ranking, d[i].RankingEnvironmental, d[i].RankingSocial, d[i].RankingEconomic, d[i].RankingQualitymanagement, d[i].RankingEthics);
seriesOv.data.push([datadate, d[i].Ranking]);
seriesEn.data.push([datadate, d[i].RankingEnvironmental]);
seriesSc.data.push([datadate, d[i].RankingSocial]);
seriesEc.data.push([datadate, d[i].RankingEconomic]);
seriesQm.data.push([datadate, d[i].RankingQualitymanagement]);
seriesEt.data.push([datadate, d[i].RankingEthics]);
}
$(el).append(history_template.format(html));
var uid = "{0}-{1}".format(nd.StandardId, nd.UserReportId);
$(el).append('<div class="history_graph" id="graph-' + uid + '"></div>');
$(el).append(overview_template.format(
nd.Ranking.toFixed(0)
, nd.RankingEnvironmental.toFixed(0)
, nd.RankingSocial.toFixed(0)
, nd.RankingEconomic.toFixed(0)
, nd.RankingQualitymanagement.toFixed(0)
, nd.RankingEthics.toFixed(0)
, formatDate(fixJsonDate(nd.DateCreated))
, nd.NumberPass
, nd.NumberAlmost
, nd.NumberFail
, nd.NumberPass + nd.NumberAlmost + nd.NumberFail
));
$('#graph-' + uid).highcharts({
chart: {
zoomType: 'x',
type: 'line'
},
colors : [
'#36A7E9',
'#95a644',
'#636363',
'#172944',
'#C1413B',
'#8F0063',
'#424884',
'#789C3C',
'#5695AF'
],
title: {
text: 'Compliance history'
}/*,
subtitle: {
text: 'Progress over time'
}*/,
xAxis: {
type: 'datetime',
dateTimeLabelFormats: {
second: '%Y-%m-%d',
minute: '%Y-%m-%d',
hour: '%Y-%m-%d',
day: '%Y %m-%d',
week: '%Y %m-%d',
month: '%Y-%m',
year: '%Y'
}/*,
title: {
text: 'Date'
}*/
},
yAxis: {
title: {
text: 'Compliance (%)'
},
min: 0,
max: 100
},
tooltip: {
headerFormat: '<b>{series.name}</b><br>',
pointFormat: '{point.x:%e. %b}: {point.y:.0f} %'
},
series: [seriesOv,seriesEn,seriesSc,seriesEc,seriesQm,seriesEt]
});
}
$(function () {
$.getJSON('http://www.highcharts.com/samples/data/jsonp.php?filename=world-population-history.csv&callback=?', function (csv) {
// Parse the CSV Data
/*Highcharts.data({
csv: data,
switchRowsAndColumns: true,
parsed: function () {
console.log(this.columns);
}
});*/
// Very simple and case-specific CSV string splitting
function CSVtoArray(text) {
| return text.replace(/^"/, '')
.replace(/",$/, '')
.split('","');
};
| identifier_body |
|
marketplace.js | type: 'POST'
});
}
var getCompanyDetails = function (companyId, profileId, languageId, callback) {
$.ajax({
url: './handlers/MarketplaceCompanyDetailsHandler.ashx',
data: { company: companyId, profile: profileId, language: languageId },
success: function (d) {
if (callback) callback(d);
},
dataType: 'json',
type: 'POST'
});
}
$(function () {
$('.form-group select').selectize({ clearOnOpen: true }).on('change', filterSearch);
$('#CompanyName').on('change', filterSearch);
$('#Filters input.percentslider')
.slider({tooltip: 'show',selection: 'after', orientation: 'horizontal', step: 5, min: -1, max: 100, value: -1})
.on('slide', function(ev){ $(ev.currentTarget).closest('.percentageFilter').find('span.percentage').text(-1 < ev.value ? ev.value + '%' : 'na');})
.on('slideStop', filterSearch)
.closest('.percentageFilter')
.append('<span class="percentage">na</span>');
filterSearch();
});
function max(a, b)
{
if (!a || !isNumber(a)) return b;
if (!b || !isNumber(b)) return a;
return Math.max(a, b);
}
function isNumber(n) {
return !isNaN(parseFloat(n)) && isFinite(n);
}
function filterSearch() {
var fs = {};
fs.CompanyName = '';
fs.Products = [];
fs.ProductCategories = []
fs.ProducingCountries = [];
fs.ProducingRegions = [];
fs.StandardsCertified = []
fs.StandardsAssessed = [];
fs.Compliance = {};
fs.Compliance.Overall = max(-1, $('#overallPercentage').val());
fs.Compliance.Environment = max(-1, $('#environmentPercentage').val());
fs.Compliance.Social = max(-1, $('#sociallPercentage').val());
fs.Compliance.Economic = max(-1, $('#economicPercentage').val());
fs.Compliance.Quality = max(-1, $('#qualityPercentage').val());
fs.Compliance.BusinessIntegrity = max(-1, $('#businessIntegrityPercentage').val());
var sd;
sd = $('#CompanyName').val();
if (sd && 0 < sd.length) {
fs.CompanyName = sd;
}
sd = $('#DropDownProduct').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
var p = o.split(':')[2];
if (p && 0 < p) fs.Products.push(p);
else fs.ProductCategories.push(o.split(':')[0]);
});
}
sd = $('#DropDownProducingCountry').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
var p = o.split(':')[1];
if (p && 0 < p) fs.ProducingCountries.push(p);
else fs.ProducingRegions.push(o.split(':')[0]);
});
}
sd = $('#StandardsCertifiedList').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
fs.StandardsCertified.push(o);
});
}
sd = $('#StandardsAssessedList').val();
if (sd && 0 < sd.length) {
sd.forEach(function (o, i) {
fs.StandardsAssessed.push(o);
});
}
searchReports(fs, buildResultsTable);
}
var buildResultsTable = function (d) {
var results_container = $('#ResultsTable tbody');
var standard_template = '<span class="standard" data-id="{0}" data-name="{1}" data-logolarge="{3}"><a href="marketplace.aspx#"><img src="http://search.standardsmap.org:8080/{2}" title="{1}" width="45" height="45" /></a></span>';
var row_template = '<tr class="report_result" data-id="{0}"><td class="company"><a href="marketplace.aspx#">{1}</a></td><td>{2}</td><td>{3}</td><td>{4}</td><td>{5}</td><td><a href="http://search.standardsmap.org:8080/reportfinal.aspx?report={0}">View</a></td></tr>';
var report_template = '<tr class="standard_result" data-id="{0}" id="{3}-{0}"><td colspan="6"><span class="standard_title"><img src="http://search.standardsmap.org:8080/{2}" width="90" height="90" /><h4>{1}</h4></span><span id="data-{3}-{0}"></span></td></tr>';
var company_template = '<tr class="company_result" data-id="{0}" id="{1}-{0}"><td colspan="6"><span id="company-{1}-{0}"></span></td></tr>';
results_container.empty();
if (d) {
var rpt_list = [];
d.forEach(function (r, i) {
rpt_list.push(r.ReportID);
var standards = '';
if (r.Standards) {
r.Standards.forEach(function (s, i) { standards += standard_template.format(s.Id, s.Name, s.LogoSmall, s.LogoLarge); });
}
var row = $(row_template.format(r.ReportID, r.CompanyName, r.Product, r.Country, standards, formatDate(fixJsonDate(r.Modified))));
row.find('td.company a').on('click', function (ev) {
ev.preventDefault();
var result = $(ev.currentTarget).closest('.report_result');
var cmp_row = results_container.find('#' + r.ReportID + '-' + r.CompanyID + '.company_result');
// does the row exist? if so toggle visibility
if (cmp_row.length) {
cmp_row.toggle();
}
else {
cmp_row = result.after(company_template.format(r.CompanyID, r.ReportID));
getCompanyDetails(r.CompanyID, 0, 1, function (d) {
showCompanyDetails(d, '#company-' + r.ReportID + '-' + r.CompanyID);
});
}
});
results_container.append(row);
});
results_container.find('.standard a').on('click', function (ev) {
ev.preventDefault();
var std = $(ev.currentTarget).closest('.standard');
var result = std.closest('.report_result');
var id = std.attr('data-id');
var name = std.attr('data-name');
var logo = std.attr('data-logolarge');
var rid = result.attr('data-id');
var std_row = results_container.find('#' + rid + '-' + id + '.standard_result');
// does the row exist? if so toggle visibility
if (std_row.length) {
std_row.toggle();
}
else {
var cmp_row = result.next('.company_result');
if (cmp_row.length) {
std_row = cmp_row.after(report_template.format(id, name, logo, rid));
}
else {
std_row = result.after(report_template.format(id, name, logo, rid));
}
getStandardResults(rid, id, 0, function (d) {
showStandardResults(d.Data, '#data-' + rid + '-' + id);
});
}
});
$('#ReportIDs').val(rpt_list.join(','));
}
}
var fixJsonDate = function (dt) {
var date = new Date(parseInt(dt.replace(/[^0-9 +]/g, '')));
return date;
}
var formatDate = function (d) {
var dd = d.getDate();
if ( dd < 10 ) dd = '0' + dd;
var mm = d.getMonth()+1;
if ( mm < 10 ) mm = '0' + mm;
var yy = d.getFullYear() % 100;
if ( yy < 10 ) yy = '0' + yy;
return dd+'/'+mm+'/'+yy;
}
var showCompanyDetails = function (d, el) {
var template = '<span class="standard_title"><img src="http://search.standardsmap.org:8080/{5}" width="90" height="90"><h4>{1}</h4></span><span class="company_details"><p class="lead">{2}</p><p>Products/Services: {6}<br />Destination markets: {7}</p><p>Contact: <a href="mailto:{4}">{4}</a><br />Website: <a href="http://search.standardsmap.org:8080/{3}" target="_blank">{3}</a></p></span>{8}';
var standards = '';
var standard_template = '<span class="standard" data-id="{0}" data-name="{1}" data-logolarge="{3}"><img src="http://search.standardsmap.org:8080/{2}" title="{1}" alt="{1}" width="45" height="45" /></span>';
if (d.Standards) {
standards = '<span class="stand | data: { report: reportId, standard: standardId, profile: profileId },
success: function (d) {
if (callback) callback(d);
},
dataType: 'json', | random_line_split |
|
copy.go | - If the root node is a manifest list, it will be mapped to the first
// matching manifest if exists, otherwise ErrNotFound will be returned.
// - Otherwise ErrUnsupported will be returned.
func (opts *CopyOptions) WithTargetPlatform(p *ocispec.Platform) {
if p == nil {
return
}
mapRoot := opts.MapRoot
opts.MapRoot = func(ctx context.Context, src content.ReadOnlyStorage, root ocispec.Descriptor) (desc ocispec.Descriptor, err error) {
if mapRoot != nil {
if root, err = mapRoot(ctx, src, root); err != nil {
return ocispec.Descriptor{}, err
}
}
return platform.SelectManifest(ctx, src, root, p)
}
}
// defaultCopyMaxMetadataBytes is the default value of
// CopyGraphOptions.MaxMetadataBytes.
const defaultCopyMaxMetadataBytes int64 = 4 * 1024 * 1024 // 4 MiB
// DefaultCopyGraphOptions provides the default CopyGraphOptions.
var DefaultCopyGraphOptions CopyGraphOptions
// CopyGraphOptions contains parameters for [oras.CopyGraph].
type CopyGraphOptions struct {
// Concurrency limits the maximum number of concurrent copy tasks.
// If less than or equal to 0, a default (currently 3) is used.
Concurrency int
// MaxMetadataBytes limits the maximum size of the metadata that can be
// cached in the memory.
// If less than or equal to 0, a default (currently 4 MiB) is used.
MaxMetadataBytes int64
// PreCopy handles the current descriptor before copying it.
PreCopy func(ctx context.Context, desc ocispec.Descriptor) error
// PostCopy handles the current descriptor after copying it.
PostCopy func(ctx context.Context, desc ocispec.Descriptor) error
// OnCopySkipped will be called when the sub-DAG rooted by the current node
// is skipped.
OnCopySkipped func(ctx context.Context, desc ocispec.Descriptor) error
// FindSuccessors finds the successors of the current node.
// fetcher provides cached access to the source storage, and is suitable
// for fetching non-leaf nodes like manifests. Since anything fetched from
// fetcher will be cached in the memory, it is recommended to use original
// source storage to fetch large blobs.
// If FindSuccessors is nil, content.Successors will be used.
FindSuccessors func(ctx context.Context, fetcher content.Fetcher, desc ocispec.Descriptor) ([]ocispec.Descriptor, error)
}
// Copy copies a rooted directed acyclic graph (DAG) with the tagged root node
// in the source Target to the destination Target.
// The destination reference will be the same as the source reference if the
// destination reference is left blank.
//
// Returns the descriptor of the root node on successful copy.
func Copy(ctx context.Context, src ReadOnlyTarget, srcRef string, dst Target, dstRef string, opts CopyOptions) (ocispec.Descriptor, error) {
if src == nil {
return ocispec.Descriptor{}, errors.New("nil source target")
}
if dst == nil {
return ocispec.Descriptor{}, errors.New("nil destination target")
}
if dstRef == "" {
dstRef = srcRef
}
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy := cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
root, err := resolveRoot(ctx, src, srcRef, proxy)
if err != nil {
return ocispec.Descriptor{}, fmt.Errorf("failed to resolve %s: %w", srcRef, err)
}
if opts.MapRoot != nil {
proxy.StopCaching = true
root, err = opts.MapRoot(ctx, proxy, root)
if err != nil {
return ocispec.Descriptor{}, err
}
proxy.StopCaching = false
}
if err := prepareCopy(ctx, dst, dstRef, proxy, root, &opts); err != nil {
return ocispec.Descriptor{}, err
}
if err := copyGraph(ctx, src, dst, root, proxy, nil, nil, opts.CopyGraphOptions); err != nil {
return ocispec.Descriptor{}, err
}
return root, nil
}
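// A minimal usage sketch (assumed caller-side code, not part of this package):
// it presumes a source ReadOnlyTarget src, a destination Target dst, and a
// context ctx are already in scope; the tag "latest" and the platform values
// are placeholders.
//
//	opts := oras.CopyOptions{}
//	opts.WithTargetPlatform(&ocispec.Platform{OS: "linux", Architecture: "amd64"})
//	root, err := oras.Copy(ctx, src, "latest", dst, "latest", opts)
//	if err != nil {
//		// handle the error
//	}
//	_ = root // descriptor of the copied root node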
// CopyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS.
func CopyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor, opts CopyGraphOptions) error {
return copyGraph(ctx, src, dst, root, nil, nil, nil, opts)
}
// copyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS with specified caching, concurrency limiter and tracker.
func copyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor,
proxy *cas.Proxy, limiter *semaphore.Weighted, tracker *status.Tracker, opts CopyGraphOptions) error {
if proxy == nil {
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy = cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
}
if limiter == nil {
// if Concurrency is not set or invalid, use the default concurrency
if opts.Concurrency <= 0 {
opts.Concurrency = defaultConcurrency
}
limiter = semaphore.NewWeighted(int64(opts.Concurrency))
}
if tracker == nil {
// track content status
tracker = status.NewTracker()
}
// if FindSuccessors is not provided, use the default one
if opts.FindSuccessors == nil {
opts.FindSuccessors = content.Successors
}
// traverse the graph
var fn syncutil.GoFunc[ocispec.Descriptor]
fn = func(ctx context.Context, region *syncutil.LimitedRegion, desc ocispec.Descriptor) (err error) {
// skip the descriptor if other go routine is working on it
done, committed := tracker.TryCommit(desc)
if !committed {
return nil
}
defer func() {
if err == nil {
// mark the content as done on success
close(done)
}
}()
// skip if a rooted sub-DAG exists
exists, err := dst.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
if opts.OnCopySkipped != nil {
if err := opts.OnCopySkipped(ctx, desc); err != nil {
return err
}
}
return nil
}
// find successors while non-leaf nodes will be fetched and cached
successors, err := opts.FindSuccessors(ctx, proxy, desc)
if err != nil {
return err
}
successors = removeForeignLayers(successors)
if len(successors) != 0 {
// for non-leaf nodes, process successors and wait for them to complete
region.End()
if err := syncutil.Go(ctx, limiter, fn, successors...); err != nil {
return err
}
for _, node := range successors {
done, committed := tracker.TryCommit(node)
if committed {
return fmt.Errorf("%s: %s: successor not committed", desc.Digest, node.Digest)
}
select {
case <-done:
case <-ctx.Done():
return ctx.Err()
}
}
if err := region.Start(); err != nil {
return err
}
}
exists, err = proxy.Cache.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
return copyNode(ctx, proxy.Cache, dst, desc, opts)
}
return copyNode(ctx, src, dst, desc, opts)
}
return syncutil.Go(ctx, limiter, fn, root)
}
// doCopyNode copies a single content from the source CAS to the destination CAS.
func doCopyNode(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, desc ocispec.Descriptor) error {
rc, err := src.Fetch(ctx, desc)
if err != nil {
return err
}
defer rc.Close()
err = dst.Push(ctx, desc, rc)
if err != nil && !errors.Is(err, errdef.ErrAlreadyExists) {
return err
}
return nil
}
// copyNode copies a single content from the source CAS to the destination CAS,
// and applies the given options.
func copyNode(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, desc ocispec.Descriptor, opts CopyGraphOptions) error | {
if opts.PreCopy != nil {
if err := opts.PreCopy(ctx, desc); err != nil {
if err == errSkipDesc {
return nil
}
return err
}
}
if err := doCopyNode(ctx, src, dst, desc); err != nil {
return err
}
if opts.PostCopy != nil {
return opts.PostCopy(ctx, desc)
}
return nil
} | identifier_body |
|
copy.go | , error)
}
// WithTargetPlatform configures opts.MapRoot to select the manifest whose
// platform matches the given platform. When MapRoot is provided, the platform
// selection will be applied on the mapped root node.
// - If the given platform is nil, no platform selection will be applied.
// - If the root node is a manifest, it will remain the same if platform
// matches, otherwise ErrNotFound will be returned.
// - If the root node is a manifest list, it will be mapped to the first
// matching manifest if exists, otherwise ErrNotFound will be returned.
// - Otherwise ErrUnsupported will be returned.
func (opts *CopyOptions) WithTargetPlatform(p *ocispec.Platform) {
if p == nil {
return
}
mapRoot := opts.MapRoot
opts.MapRoot = func(ctx context.Context, src content.ReadOnlyStorage, root ocispec.Descriptor) (desc ocispec.Descriptor, err error) {
if mapRoot != nil {
if root, err = mapRoot(ctx, src, root); err != nil {
return ocispec.Descriptor{}, err
}
}
return platform.SelectManifest(ctx, src, root, p)
}
}
// defaultCopyMaxMetadataBytes is the default value of
// CopyGraphOptions.MaxMetadataBytes.
const defaultCopyMaxMetadataBytes int64 = 4 * 1024 * 1024 // 4 MiB
// DefaultCopyGraphOptions provides the default CopyGraphOptions.
var DefaultCopyGraphOptions CopyGraphOptions
// CopyGraphOptions contains parameters for [oras.CopyGraph].
type CopyGraphOptions struct {
// Concurrency limits the maximum number of concurrent copy tasks.
// If less than or equal to 0, a default (currently 3) is used.
Concurrency int
// MaxMetadataBytes limits the maximum size of the metadata that can be
// cached in the memory.
// If less than or equal to 0, a default (currently 4 MiB) is used.
MaxMetadataBytes int64
// PreCopy handles the current descriptor before copying it.
PreCopy func(ctx context.Context, desc ocispec.Descriptor) error
// PostCopy handles the current descriptor after copying it.
PostCopy func(ctx context.Context, desc ocispec.Descriptor) error
// OnCopySkipped will be called when the sub-DAG rooted by the current node
// is skipped.
OnCopySkipped func(ctx context.Context, desc ocispec.Descriptor) error
// FindSuccessors finds the successors of the current node.
// fetcher provides cached access to the source storage, and is suitable
// for fetching non-leaf nodes like manifests. Since anything fetched from
// fetcher will be cached in the memory, it is recommended to use original
// source storage to fetch large blobs.
// If FindSuccessors is nil, content.Successors will be used.
FindSuccessors func(ctx context.Context, fetcher content.Fetcher, desc ocispec.Descriptor) ([]ocispec.Descriptor, error)
}
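// A minimal configuration sketch (assumed caller-side code, not part of this
// package): src and dst are presumed to satisfy content.ReadOnlyStorage and
// content.Storage, and root is the descriptor of the graph to copy.
//
//	opts := oras.DefaultCopyGraphOptions
//	opts.Concurrency = 4
//	opts.PreCopy = func(ctx context.Context, desc ocispec.Descriptor) error {
//		log.Printf("copying %s %s", desc.MediaType, desc.Digest)
//		return nil
//	}
//	opts.OnCopySkipped = func(ctx context.Context, desc ocispec.Descriptor) error {
//		log.Printf("skipped %s", desc.Digest)
//		return nil
//	}
//	if err := oras.CopyGraph(ctx, src, dst, root, opts); err != nil {
//		// handle the error
//	}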
// Copy copies a rooted directed acyclic graph (DAG) with the tagged root node
// in the source Target to the destination Target.
// The destination reference will be the same as the source reference if the
// destination reference is left blank.
//
// Returns the descriptor of the root node on successful copy.
func Copy(ctx context.Context, src ReadOnlyTarget, srcRef string, dst Target, dstRef string, opts CopyOptions) (ocispec.Descriptor, error) {
if src == nil {
return ocispec.Descriptor{}, errors.New("nil source target")
}
if dst == nil {
return ocispec.Descriptor{}, errors.New("nil destination target")
}
if dstRef == "" {
dstRef = srcRef
}
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy := cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
root, err := resolveRoot(ctx, src, srcRef, proxy)
if err != nil {
return ocispec.Descriptor{}, fmt.Errorf("failed to resolve %s: %w", srcRef, err)
}
if opts.MapRoot != nil {
proxy.StopCaching = true
root, err = opts.MapRoot(ctx, proxy, root)
if err != nil {
return ocispec.Descriptor{}, err
}
proxy.StopCaching = false
}
if err := prepareCopy(ctx, dst, dstRef, proxy, root, &opts); err != nil {
return ocispec.Descriptor{}, err
}
if err := copyGraph(ctx, src, dst, root, proxy, nil, nil, opts.CopyGraphOptions); err != nil {
return ocispec.Descriptor{}, err
}
return root, nil
}
// CopyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS.
func CopyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor, opts CopyGraphOptions) error {
return copyGraph(ctx, src, dst, root, nil, nil, nil, opts)
}
// copyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS with specified caching, concurrency limiter and tracker.
func copyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor,
proxy *cas.Proxy, limiter *semaphore.Weighted, tracker *status.Tracker, opts CopyGraphOptions) error {
if proxy == nil {
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy = cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
}
if limiter == nil {
// if Concurrency is not set or invalid, use the default concurrency
if opts.Concurrency <= 0 {
opts.Concurrency = defaultConcurrency
}
limiter = semaphore.NewWeighted(int64(opts.Concurrency))
}
if tracker == nil {
// track content status
tracker = status.NewTracker()
}
// if FindSuccessors is not provided, use the default one
if opts.FindSuccessors == nil {
opts.FindSuccessors = content.Successors
}
// traverse the graph
var fn syncutil.GoFunc[ocispec.Descriptor]
fn = func(ctx context.Context, region *syncutil.LimitedRegion, desc ocispec.Descriptor) (err error) {
// skip the descriptor if other go routine is working on it
done, committed := tracker.TryCommit(desc)
if !committed {
return nil
}
defer func() {
if err == nil {
// mark the content as done on success
close(done)
}
}()
// skip if a rooted sub-DAG exists
exists, err := dst.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
if opts.OnCopySkipped != nil {
if err := opts.OnCopySkipped(ctx, desc); err != nil {
return err
}
}
return nil
}
// find successors while non-leaf nodes will be fetched and cached
successors, err := opts.FindSuccessors(ctx, proxy, desc)
if err != nil {
return err
}
successors = removeForeignLayers(successors)
if len(successors) != 0 {
// for non-leaf nodes, process successors and wait for them to complete
region.End()
if err := syncutil.Go(ctx, limiter, fn, successors...); err != nil {
return err
}
for _, node := range successors {
done, committed := tracker.TryCommit(node)
if committed {
return fmt.Errorf("%s: %s: successor not committed", desc.Digest, node.Digest)
}
select {
case <-done:
case <-ctx.Done():
return ctx.Err()
}
}
if err := region.Start(); err != nil {
return err
}
}
exists, err = proxy.Cache.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
return copyNode(ctx, proxy.Cache, dst, desc, opts)
}
return copyNode(ctx, src, dst, desc, opts)
}
return syncutil.Go(ctx, limiter, fn, root)
}
// doCopyNode copies a single content from the source CAS to the destination CAS.
func | (ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, desc ocispec.Descriptor) error {
rc, err := src.Fetch(ctx, desc)
if err != nil {
return err
}
defer rc.Close()
err = dst.Push(ctx, desc, rc)
if err != nil && !errors.Is(err, errdef.ErrAlreadyExists) {
return err
}
return nil
}
// copyNode copies a single content from the source CAS to the destination CAS,
// and applies the given options.
func copyNode(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, desc ocispec.Descriptor, opts CopyGraphOptions) | doCopyNode | identifier_name |
copy.go | error)
}
// WithTargetPlatform configures opts.MapRoot to select the manifest whose
// platform matches the given platform. When MapRoot is provided, the platform
// selection will be applied on the mapped root node.
// - If the given platform is nil, no platform selection will be applied.
// - If the root node is a manifest, it will remain the same if platform
// matches, otherwise ErrNotFound will be returned.
// - If the root node is a manifest list, it will be mapped to the first
// matching manifest if exists, otherwise ErrNotFound will be returned.
// - Otherwise ErrUnsupported will be returned.
func (opts *CopyOptions) WithTargetPlatform(p *ocispec.Platform) {
if p == nil {
return
}
mapRoot := opts.MapRoot
opts.MapRoot = func(ctx context.Context, src content.ReadOnlyStorage, root ocispec.Descriptor) (desc ocispec.Descriptor, err error) {
if mapRoot != nil {
if root, err = mapRoot(ctx, src, root); err != nil {
return ocispec.Descriptor{}, err
}
}
return platform.SelectManifest(ctx, src, root, p)
}
}
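// A sketch of how MapRoot and WithTargetPlatform compose (assumed caller-side
// code; the remapping logic is a placeholder): WithTargetPlatform wraps any
// MapRoot set before it, so platform selection runs on the mapped root node.
//
//	opts := oras.CopyOptions{}
//	opts.MapRoot = func(ctx context.Context, src content.ReadOnlyStorage, root ocispec.Descriptor) (ocispec.Descriptor, error) {
//		// remap the root here if needed, e.g. to a different manifest
//		return root, nil
//	}
//	opts.WithTargetPlatform(&ocispec.Platform{OS: "linux", Architecture: "arm64"})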
// defaultCopyMaxMetadataBytes is the default value of
// CopyGraphOptions.MaxMetadataBytes.
const defaultCopyMaxMetadataBytes int64 = 4 * 1024 * 1024 // 4 MiB
// DefaultCopyGraphOptions provides the default CopyGraphOptions.
var DefaultCopyGraphOptions CopyGraphOptions
// CopyGraphOptions contains parameters for [oras.CopyGraph].
type CopyGraphOptions struct {
// Concurrency limits the maximum number of concurrent copy tasks.
// If less than or equal to 0, a default (currently 3) is used.
Concurrency int
// MaxMetadataBytes limits the maximum size of the metadata that can be
// cached in the memory.
// If less than or equal to 0, a default (currently 4 MiB) is used.
MaxMetadataBytes int64
// PreCopy handles the current descriptor before copying it.
PreCopy func(ctx context.Context, desc ocispec.Descriptor) error
// PostCopy handles the current descriptor after copying it.
PostCopy func(ctx context.Context, desc ocispec.Descriptor) error
// OnCopySkipped will be called when the sub-DAG rooted by the current node
// is skipped.
OnCopySkipped func(ctx context.Context, desc ocispec.Descriptor) error
// FindSuccessors finds the successors of the current node.
// fetcher provides cached access to the source storage, and is suitable
// for fetching non-leaf nodes like manifests. Since anything fetched from
// fetcher will be cached in the memory, it is recommended to use original
// source storage to fetch large blobs.
// If FindSuccessors is nil, content.Successors will be used.
FindSuccessors func(ctx context.Context, fetcher content.Fetcher, desc ocispec.Descriptor) ([]ocispec.Descriptor, error)
}
// Copy copies a rooted directed acyclic graph (DAG) with the tagged root node
// in the source Target to the destination Target.
// The destination reference will be the same as the source reference if the
// destination reference is left blank.
//
// Returns the descriptor of the root node on successful copy.
func Copy(ctx context.Context, src ReadOnlyTarget, srcRef string, dst Target, dstRef string, opts CopyOptions) (ocispec.Descriptor, error) {
if src == nil {
return ocispec.Descriptor{}, errors.New("nil source target")
}
if dst == nil {
return ocispec.Descriptor{}, errors.New("nil destination target")
}
if dstRef == "" {
dstRef = srcRef
}
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy := cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
root, err := resolveRoot(ctx, src, srcRef, proxy)
if err != nil {
return ocispec.Descriptor{}, fmt.Errorf("failed to resolve %s: %w", srcRef, err)
}
if opts.MapRoot != nil {
proxy.StopCaching = true
root, err = opts.MapRoot(ctx, proxy, root)
if err != nil {
return ocispec.Descriptor{}, err
}
proxy.StopCaching = false
}
if err := prepareCopy(ctx, dst, dstRef, proxy, root, &opts); err != nil {
return ocispec.Descriptor{}, err
}
if err := copyGraph(ctx, src, dst, root, proxy, nil, nil, opts.CopyGraphOptions); err != nil {
return ocispec.Descriptor{}, err
}
return root, nil
}
// CopyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS.
func CopyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor, opts CopyGraphOptions) error {
return copyGraph(ctx, src, dst, root, nil, nil, nil, opts)
}
// copyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS with specified caching, concurrency limiter and tracker.
func copyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor,
proxy *cas.Proxy, limiter *semaphore.Weighted, tracker *status.Tracker, opts CopyGraphOptions) error {
if proxy == nil {
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy = cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
}
if limiter == nil {
// if Concurrency is not set or invalid, use the default concurrency
if opts.Concurrency <= 0 {
opts.Concurrency = defaultConcurrency
}
limiter = semaphore.NewWeighted(int64(opts.Concurrency))
}
if tracker == nil {
// track content status
tracker = status.NewTracker()
}
// if FindSuccessors is not provided, use the default one
if opts.FindSuccessors == nil {
opts.FindSuccessors = content.Successors
}
// traverse the graph
var fn syncutil.GoFunc[ocispec.Descriptor]
fn = func(ctx context.Context, region *syncutil.LimitedRegion, desc ocispec.Descriptor) (err error) {
// skip the descriptor if other go routine is working on it
done, committed := tracker.TryCommit(desc)
if !committed {
return nil
}
defer func() {
if err == nil {
// mark the content as done on success
close(done)
}
}()
// skip if a rooted sub-DAG exists
exists, err := dst.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
if opts.OnCopySkipped != nil {
if err := opts.OnCopySkipped(ctx, desc); err != nil |
}
return nil
}
// find successors while non-leaf nodes will be fetched and cached
successors, err := opts.FindSuccessors(ctx, proxy, desc)
if err != nil {
return err
}
successors = removeForeignLayers(successors)
if len(successors) != 0 {
// for non-leaf nodes, process successors and wait for them to complete
region.End()
if err := syncutil.Go(ctx, limiter, fn, successors...); err != nil {
return err
}
for _, node := range successors {
done, committed := tracker.TryCommit(node)
if committed {
return fmt.Errorf("%s: %s: successor not committed", desc.Digest, node.Digest)
}
select {
case <-done:
case <-ctx.Done():
return ctx.Err()
}
}
if err := region.Start(); err != nil {
return err
}
}
exists, err = proxy.Cache.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
return copyNode(ctx, proxy.Cache, dst, desc, opts)
}
return copyNode(ctx, src, dst, desc, opts)
}
return syncutil.Go(ctx, limiter, fn, root)
}
// doCopyNode copies a single content from the source CAS to the destination CAS.
func doCopyNode(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, desc ocispec.Descriptor) error {
rc, err := src.Fetch(ctx, desc)
if err != nil {
return err
}
defer rc.Close()
err = dst.Push(ctx, desc, rc)
if err != nil && !errors.Is(err, errdef.ErrAlreadyExists) {
return err
}
return nil
}
// copyNode copies a single content from the source CAS to the destination CAS,
// and applies the given options.
func copyNode(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, desc ocispec.Descriptor, opts CopyGraphOptions) | {
return err
} | conditional_block |
copy.go | // reference will be passed to MapRoot, and the mapped descriptor will be
// used as the root node for copy.
MapRoot func(ctx context.Context, src content.ReadOnlyStorage, root ocispec.Descriptor) (ocispec.Descriptor, error)
}
// WithTargetPlatform configures opts.MapRoot to select the manifest whose
// platform matches the given platform. When MapRoot is provided, the platform
// selection will be applied on the mapped root node.
// - If the given platform is nil, no platform selection will be applied.
// - If the root node is a manifest, it will remain the same if platform
// matches, otherwise ErrNotFound will be returned.
// - If the root node is a manifest list, it will be mapped to the first
// matching manifest if exists, otherwise ErrNotFound will be returned.
// - Otherwise ErrUnsupported will be returned.
func (opts *CopyOptions) WithTargetPlatform(p *ocispec.Platform) {
if p == nil {
return
}
mapRoot := opts.MapRoot
opts.MapRoot = func(ctx context.Context, src content.ReadOnlyStorage, root ocispec.Descriptor) (desc ocispec.Descriptor, err error) {
if mapRoot != nil {
if root, err = mapRoot(ctx, src, root); err != nil {
return ocispec.Descriptor{}, err
}
}
return platform.SelectManifest(ctx, src, root, p)
}
}
// defaultCopyMaxMetadataBytes is the default value of
// CopyGraphOptions.MaxMetadataBytes.
const defaultCopyMaxMetadataBytes int64 = 4 * 1024 * 1024 // 4 MiB
// DefaultCopyGraphOptions provides the default CopyGraphOptions.
var DefaultCopyGraphOptions CopyGraphOptions
// CopyGraphOptions contains parameters for [oras.CopyGraph].
type CopyGraphOptions struct {
// Concurrency limits the maximum number of concurrent copy tasks.
// If less than or equal to 0, a default (currently 3) is used.
Concurrency int
// MaxMetadataBytes limits the maximum size of the metadata that can be
// cached in the memory.
// If less than or equal to 0, a default (currently 4 MiB) is used.
MaxMetadataBytes int64
// PreCopy handles the current descriptor before copying it.
PreCopy func(ctx context.Context, desc ocispec.Descriptor) error
// PostCopy handles the current descriptor after copying it.
PostCopy func(ctx context.Context, desc ocispec.Descriptor) error
// OnCopySkipped will be called when the sub-DAG rooted by the current node
// is skipped.
OnCopySkipped func(ctx context.Context, desc ocispec.Descriptor) error
// FindSuccessors finds the successors of the current node.
// fetcher provides cached access to the source storage, and is suitable
// for fetching non-leaf nodes like manifests. Since anything fetched from
// fetcher will be cached in the memory, it is recommended to use original
// source storage to fetch large blobs.
// If FindSuccessors is nil, content.Successors will be used.
FindSuccessors func(ctx context.Context, fetcher content.Fetcher, desc ocispec.Descriptor) ([]ocispec.Descriptor, error)
}
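// A sketch of a custom FindSuccessors (assumed caller-side code): it wraps the
// default content.Successors to log each successor before returning the list
// unchanged; the logging stands in for any custom traversal logic.
//
//	opts := oras.DefaultCopyGraphOptions
//	opts.FindSuccessors = func(ctx context.Context, fetcher content.Fetcher, desc ocispec.Descriptor) ([]ocispec.Descriptor, error) {
//		successors, err := content.Successors(ctx, fetcher, desc)
//		if err != nil {
//			return nil, err
//		}
//		for _, s := range successors {
//			log.Printf("successor of %s: %s", desc.Digest, s.Digest)
//		}
//		return successors, nil
//	}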
// Copy copies a rooted directed acyclic graph (DAG) with the tagged root node
// in the source Target to the destination Target.
// The destination reference will be the same as the source reference if the
// destination reference is left blank.
//
// Returns the descriptor of the root node on successful copy.
func Copy(ctx context.Context, src ReadOnlyTarget, srcRef string, dst Target, dstRef string, opts CopyOptions) (ocispec.Descriptor, error) {
if src == nil {
return ocispec.Descriptor{}, errors.New("nil source target")
}
if dst == nil {
return ocispec.Descriptor{}, errors.New("nil destination target")
}
if dstRef == "" {
dstRef = srcRef
}
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy := cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
root, err := resolveRoot(ctx, src, srcRef, proxy)
if err != nil {
return ocispec.Descriptor{}, fmt.Errorf("failed to resolve %s: %w", srcRef, err)
}
if opts.MapRoot != nil {
proxy.StopCaching = true
root, err = opts.MapRoot(ctx, proxy, root)
if err != nil {
return ocispec.Descriptor{}, err
}
proxy.StopCaching = false
}
if err := prepareCopy(ctx, dst, dstRef, proxy, root, &opts); err != nil {
return ocispec.Descriptor{}, err
}
if err := copyGraph(ctx, src, dst, root, proxy, nil, nil, opts.CopyGraphOptions); err != nil {
return ocispec.Descriptor{}, err
}
return root, nil
}
// CopyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS.
func CopyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor, opts CopyGraphOptions) error {
return copyGraph(ctx, src, dst, root, nil, nil, nil, opts)
}
// copyGraph copies a rooted directed acyclic graph (DAG) from the source CAS to
// the destination CAS with specified caching, concurrency limiter and tracker.
func copyGraph(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, root ocispec.Descriptor,
proxy *cas.Proxy, limiter *semaphore.Weighted, tracker *status.Tracker, opts CopyGraphOptions) error {
if proxy == nil {
// use caching proxy on non-leaf nodes
if opts.MaxMetadataBytes <= 0 {
opts.MaxMetadataBytes = defaultCopyMaxMetadataBytes
}
proxy = cas.NewProxyWithLimit(src, cas.NewMemory(), opts.MaxMetadataBytes)
}
if limiter == nil {
// if Concurrency is not set or invalid, use the default concurrency
if opts.Concurrency <= 0 {
opts.Concurrency = defaultConcurrency
}
limiter = semaphore.NewWeighted(int64(opts.Concurrency))
}
if tracker == nil {
// track content status
tracker = status.NewTracker()
}
// if FindSuccessors is not provided, use the default one
if opts.FindSuccessors == nil {
opts.FindSuccessors = content.Successors
}
// traverse the graph
var fn syncutil.GoFunc[ocispec.Descriptor]
fn = func(ctx context.Context, region *syncutil.LimitedRegion, desc ocispec.Descriptor) (err error) {
// skip the descriptor if other go routine is working on it
done, committed := tracker.TryCommit(desc)
if !committed {
return nil
}
defer func() {
if err == nil {
// mark the content as done on success
close(done)
}
}()
// skip if a rooted sub-DAG exists
exists, err := dst.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
if opts.OnCopySkipped != nil {
if err := opts.OnCopySkipped(ctx, desc); err != nil {
return err
}
}
return nil
}
// find successors while non-leaf nodes will be fetched and cached
successors, err := opts.FindSuccessors(ctx, proxy, desc)
if err != nil {
return err
}
successors = removeForeignLayers(successors)
if len(successors) != 0 {
// for non-leaf nodes, process successors and wait for them to complete
region.End()
if err := syncutil.Go(ctx, limiter, fn, successors...); err != nil {
return err
}
for _, node := range successors {
done, committed := tracker.TryCommit(node)
if committed {
return fmt.Errorf("%s: %s: successor not committed", desc.Digest, node.Digest)
}
select {
case <-done:
case <-ctx.Done():
return ctx.Err()
}
}
if err := region.Start(); err != nil {
return err
}
}
exists, err = proxy.Cache.Exists(ctx, desc)
if err != nil {
return err
}
if exists {
return copyNode(ctx, proxy.Cache, dst, desc, opts)
}
return copyNode(ctx, src, dst, desc, opts)
}
return syncutil.Go(ctx, limiter, fn, root)
}
// doCopyNode copies a single content from the source CAS to the destination CAS.
func doCopyNode(ctx context.Context, src content.ReadOnlyStorage, dst content.Storage, desc ocispec.Descriptor) error {
rc, err := src.Fetch(ctx, desc)
if err != nil {
return err
}
defer rc.Close()
err = dst.Push(ctx, desc, rc)
if err != nil && !errors.Is(err, errdef.ErrAlreadyExists) {
return err
}
return nil
}
| random_line_split |
||
alumniMap-controller.js | results"]
if (results.length > 0){
result = results[0];
lat = parseFloat(result["geometry"]["lat"]);
lon = parseFloat(result["geometry"]["lng"]);
return [lat, lon];
} else {
throw new Error('No Results');
}
})
.catch( error => console.error('Error fetching lat & lon:', error) );
} catch (err) {
console.log(`opencagedata API error: ${err}`);
return {};
}
}
async function process_results(results) {
var name_list = {}
var organized_by_alumn = {};
var location_dict = {}
results.forEach(element => {
alumn = element[1];
year = element[2];
us = element[5];
timestamp = Date.parse(element[0]);
lat = element[9]
lon = element[10]
console.log(lat,lon)
area = element[7]
console.log(area)
if (us == "Yes"){
city = element[3];
state = element[4];
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
}else{
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp){
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}else{
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
} else {
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
delete organized_by_alumn[alumn];
}
}
place = element[6];
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp){
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}else{
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}
});
organized_by_location = {};
for (let [alumn, val] of Object.entries(organized_by_alumn)) {
console.log("Organized by location");
loc = val.location;
console.log(loc);
if(loc in location_dict){
//
}
else{
location_dict[loc] = [val.lat,val.lon]
}
year = val.year;
alumn_with_year = alumn + " '"+year;
if (loc in organized_by_location){
organized_by_location[loc].push(alumn_with_year);
}else{
organized_by_location[loc] = [alumn_with_year];
}
}
bubble_list = []
for ( let [loc, alumn_list] of Object.entries(organized_by_location)) {
// let geo = await get_lat_and_lon(loc+", US");
let lat = location_dict[loc][0];
let lon = location_dict[loc][1];
bubble_list.push({location: loc, latitude: lat,
longitude: lon, radius: 3, fillKey: 'bubble',
alumns: alumn_list});
}
all_alumni = []
for (let [alumn, val] of Object.entries(name_list)) {
loc = val.location;
year = val.year;
list_entry = alumn + " '"+year+" • "+loc;
all_alumni.push(list_entry);
}
return [bubble_list, all_alumni];
}
async function convert_sheet_to_bubble_list(id) {
const options = {
sheetId: id,
sheetNumber: 1,
returnAllResults: true
};
return await gsheetProcessor(options, process_results);
}
async function create_bubble_list() |
function create_popup(location, alumns) {
alumns_html = 'Residents:'
alumns_html += '<br><ul class="popup">\n'
for (var i=0; i<alumns.length; i++){
alumn = alumns[i];
alumns_html += "<li>"+alumn+"</li>\n";
}
alumns_html += "</ul>"
return "<b>"+location+"</b>"+"<br>"+alumns_html;
}
function create_table(list) {
alumns_html = "<ul id='alumni' class='alumni' style='list-style: none;'><li>"
for (var i=0; i<list.length; i++){
item = list[i];
alumns_html += "<p>"+item+"</p>"
}
alumns_html += "</li></ul>"
return alumns_html;
}
//--------------------------------------------------------
/* Credit: https://github.com/bpk68/g-sheets-api */
/* Used with slight modifications */
//import GSheetsapi from './gsheetsapi.js';
function matchValues(valToMatch, valToMatchAgainst, matchingType) {
try {
if (typeof valToMatch != 'undefined') {
valToMatch = valToMatch.toLowerCase().trim();
valToMatchAgainst = valToMatchAgainst.toLowerCase().trim();
if (matchingType === 'strict') {
return valToMatch === valToMatchAgainst;
}
if (matchingType === 'loose') {
return valToMatch.includes(valToMatchAgainst) || (valToMatch == valToMatchAgainst);
}
}
} catch (e) {
console.log(`error in matchValues: ${e.message}`);
return false;
}
return false;
};
function filterResults(resultsToFilter, filter, options) {
let filteredData = [];
// now we have a list of rows, we can filter by various things
return resultsToFilter.filter(item => {
let addRow = null;
let filterMatches = [];
if (typeof item === 'undefined' ||
item.length <= 0 ||
Object.keys(item).length <= 0) {
return false;
}
Object.keys(filter).forEach(key => {
const filterValue = filter[key]; // e.g. 'archaeology'
const itemValue = item[key]; // e.g. 'department' or 'undefined'
filterMatches.push(matchValues(itemValue, filterValue, options.matching || 'loose'));
});
if (options.operator === 'or') {
addRow = filterMatches.some(match => match === true);
}
if (options.operator === 'and') {
addRow = filterMatches.every(match => match === true);
}
return addRow;
});
}
function processGSheetResults(JSONResponse, returnAllResults, filter, filterOptions) {
const data = JSONResponse.feed.entry;
const startRow = 2; // skip the header row(1), don't need it
let processedResults = [{}];
let colNames = {};
for (let item of data) {
const cell = item['gs$cell']; // gets cell data
const val = cell['$t']; // gets cell value
const columnNum = cell['col']; // gets the col number
const thisRow = cell['row']; // gets the row number
const colNameToAdd = colNames[columnNum]; // careful, this will be undefined if we hit it on the first pass
// don't add this row to the return data, but add it to list of column names
if (thisRow < startRow) {
colNames[columnNum] = val.toLowerCase();
continue; // skip the header row
}
if (typeof processedResults[thisRow] === 'undefined') {
processedResults[thisRow] = {};
}
if (typeof colNameToAdd !== 'undefined' && colNameToAdd.length > 0) {
processedResults[thisRow][colNameToAdd] = val;
}
}
// make sure we're only returning valid, filled data items
processedResults = processedResults.filter(result => Object.keys(result).length);
// if we're not filtering, | {
let id = "12lGrmIhj2dlLHt2GNucD69IktFoOA5k9Zi9rnLR0OoI";
let bubble_list = await convert_sheet_to_bubble_list(id);
return bubble_list;
} | identifier_body |
alumniMap-controller.js | results"]
if (results.length > 0){
result = results[0];
lat = parseFloat(result["geometry"]["lat"]);
lon = parseFloat(result["geometry"]["lng"]);
return [lat, lon];
} else {
throw new Error('No Results');
}
})
.catch( error => console.error('Error fetching lat & lon:', error) );
} catch (err) {
console.log(`opencagedata API error: ${err}`);
return {};
}
}
async function process_results(results) {
var name_list = {}
var organized_by_alumn = {};
var location_dict = {}
results.forEach(element => {
alumn = element[1];
year = element[2];
us = element[5];
timestamp = Date.parse(element[0]);
lat = element[9]
lon = element[10]
console.log(lat,lon)
area = element[7]
console.log(area)
if (us == "Yes"){
city = element[3];
state = element[4];
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
}else{
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp){
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}else{
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
} else {
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
delete organized_by_alumn[alumn];
}
}
place = element[6];
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp){
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}else{
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}
});
organized_by_location = {};
for (let [alumn, val] of Object.entries(organized_by_alumn)) {
console.log("Organized by location");
loc = val.location;
console.log(loc);
if(loc in location_dict){
//
}
else{
location_dict[loc] = [val.lat,val.lon]
}
year = val.year;
alumn_with_year = alumn + " '"+year;
if (loc in organized_by_location){
organized_by_location[loc].push(alumn_with_year);
}else{
organized_by_location[loc] = [alumn_with_year]; |
bubble_list = []
for ( let [loc, alumn_list] of Object.entries(organized_by_location)) {
// let geo = await get_lat_and_lon(loc+", US");
let lat = location_dict[loc][0];
let lon = location_dict[loc][1];
bubble_list.push({location: loc, latitude: lat,
longitude: lon, radius: 3, fillKey: 'bubble',
alumns: alumn_list});
}
all_alumni = []
for (let [alumn, val] of Object.entries(name_list)) {
loc = val.location;
year = val.year;
list_entry = alumn + " '"+year+" • "+loc;
all_alumni.push(list_entry);
}
return [bubble_list, all_alumni];
}
async function convert_sheet_to_bubble_list(id) {
const options = {
sheetId: id,
sheetNumber: 1,
returnAllResults: true
};
return await gsheetProcessor(options, process_results);
}
async function create_bubble_list() {
let id = "12lGrmIhj2dlLHt2GNucD69IktFoOA5k9Zi9rnLR0OoI";
let bubble_list = await convert_sheet_to_bubble_list(id);
return bubble_list;
}
function create_popup(location, alumns) {
alumns_html = 'Residents:'
alumns_html += '<br><ul class="popup">\n'
for (var i=0; i<alumns.length; i++){
alumn = alumns[i];
alumns_html += "<li>"+alumn+"</li>\n";
}
alumns_html += "</ul>"
return "<b>"+location+"</b>"+"<br>"+alumns_html;
}
function create_table(list) {
alumns_html = "<ul id='alumni' class='alumni' style='list-style: none;'><li>"
for (var i=0; i<list.length; i++){
item = list[i];
alumns_html += "<p>"+item+"</p>"
}
alumns_html += "</li></ul>"
return alumns_html;
}
//--------------------------------------------------------
/* Credit: https://github.com/bpk68/g-sheets-api */
/* Used with slight modifications */
//import GSheetsapi from './gsheetsapi.js';
function matchValues(valToMatch, valToMatchAgainst, matchingType) {
try {
if (typeof valToMatch != 'undefined') {
valToMatch = valToMatch.toLowerCase().trim();
valToMatchAgainst = valToMatchAgainst.toLowerCase().trim();
if (matchingType === 'strict') {
return valToMatch === valToMatchAgainst;
}
if (matchingType === 'loose') {
return valToMatch.includes(valToMatchAgainst) || (valToMatch == valToMatchAgainst);
}
}
} catch (e) {
console.log(`error in matchValues: ${e.message}`);
return false;
}
return false;
};
function filterResults(resultsToFilter, filter, options) {
let filteredData = [];
// now we have a list of rows, we can filter by various things
return resultsToFilter.filter(item => {
let addRow = null;
let filterMatches = [];
if (typeof item === 'undefined' ||
item.length <= 0 ||
Object.keys(item).length <= 0) {
return false;
}
Object.keys(filter).forEach(key => {
const filterValue = filter[key]; // e.g. 'archaeology'
const itemValue = item[key]; // e.g. 'department' or 'undefined'
filterMatches.push(matchValues(itemValue, filterValue, options.matching || 'loose'));
});
if (options.operator === 'or') {
addRow = filterMatches.some(match => match === true);
}
if (options.operator === 'and') {
addRow = filterMatches.every(match => match === true);
}
return addRow;
});
}
function processGSheetResults(JSONResponse, returnAllResults, filter, filterOptions) {
const data = JSONResponse.feed.entry;
const startRow = 2; // skip the header row(1), don't need it
let processedResults = [{}];
let colNames = {};
for (let item of data) {
const cell = item['gs$cell']; // gets cell data
const val = cell['$t']; // gets cell value
const columnNum = cell['col']; // gets the col number
const thisRow = cell['row']; // gets the row number
const colNameToAdd = colNames[columnNum]; // careful, this will be undefined if we hit it on the first pass
// don't add this row to the return data, but add it to list of column names
if (thisRow < startRow) {
colNames[columnNum] = val.toLowerCase();
continue; // skip the header row
}
if (typeof processedResults[thisRow] === 'undefined') {
processedResults[thisRow] = {};
}
if (typeof colNameToAdd !== 'undefined' && colNameToAdd.length > 0) {
processedResults[thisRow][colNameToAdd] = val;
}
}
// make sure we're only returning valid, filled data items
processedResults = processedResults.filter(result => Object.keys(result).length);
// if we're not filtering, then | }
} | random_line_split |
alumniMap-controller.js | results"]
if (results.length > 0){
result = results[0];
lat = parseFloat(result["geometry"]["lat"]);
lon = parseFloat(result["geometry"]["lng"]);
return [lat, lon];
} else {
throw new Error('No Results');
}
})
.catch( error => console.error('Error fetching lat & lon:', error) );
} catch (err) {
console.log(`opencagedata API error: ${err}`);
return {};
}
}
async function process_results(results) {
var name_list = {}
var organized_by_alumn = {};
var location_dict = {}
results.forEach(element => {
alumn = element[1];
year = element[2];
us = element[5];
timestamp = Date.parse(element[0]);
lat = element[9]
lon = element[10]
console.log(lat,lon)
area = element[7]
console.log(area)
if (us == "Yes"){
city = element[3];
state = element[4];
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
}else{
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp){
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}else{
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
} else {
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
delete organized_by_alumn[alumn];
}
}
place = element[6];
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp){
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}else{
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}
});
organized_by_location = {};
for (let [alumn, val] of Object.entries(organized_by_alumn)) {
console.log("Organized by location");
loc = val.location;
console.log(loc);
if(loc in location_dict){
//
}
else{
location_dict[loc] = [val.lat,val.lon]
}
year = val.year;
alumn_with_year = alumn + " '"+year;
if (loc in organized_by_location){
organized_by_location[loc].push(alumn_with_year);
}else{
organized_by_location[loc] = [alumn_with_year];
}
}
bubble_list = []
for ( let [loc, alumn_list] of Object.entries(organized_by_location)) {
// let geo = await get_lat_and_lon(loc+", US");
let lat = location_dict[loc][0];
let lon = location_dict[loc][1];
bubble_list.push({location: loc, latitude: lat,
longitude: lon, radius: 3, fillKey: 'bubble',
alumns: alumn_list});
}
all_alumni = []
for (let [alumn, val] of Object.entries(name_list)) {
loc = val.location;
year = val.year;
list_entry = alumn + " '"+year+" • "+loc;
all_alumni.push(list_entry);
}
return [bubble_list, all_alumni];
}
async function | (id) {
const options = {
sheetId: id,
sheetNumber: 1,
returnAllResults: true
};
return await gsheetProcessor(options, process_results);
}
async function create_bubble_list() {
let id = "12lGrmIhj2dlLHt2GNucD69IktFoOA5k9Zi9rnLR0OoI";
let bubble_list = await convert_sheet_to_bubble_list(id);
return bubble_list;
}
function create_popup(location, alumns) {
alumns_html = 'Residents:'
alumns_html += '<br><ul class="popup">\n'
for (var i=0; i<alumns.length; i++){
alumn = alumns[i];
alumns_html += "<li>"+alumn+"</li>\n";
}
alumns_html += "</ul>"
return "<b>"+location+"</b>"+"<br>"+alumns_html;
}
function create_table(list) {
alumns_html = "<ul id='alumni' class='alumni' style='list-style: none;'><li>"
for (var i=0; i<list.length; i++){
item = list[i];
alumns_html += "<p>"+item+"</p>"
}
alumns_html += "</li></ul>"
return alumns_html;
}
//--------------------------------------------------------
/* Credit: https://github.com/bpk68/g-sheets-api */
/* Used with slight modifications */
//import GSheetsapi from './gsheetsapi.js';
function matchValues(valToMatch, valToMatchAgainst, matchingType) {
try {
if (typeof valToMatch != 'undefined') {
valToMatch = valToMatch.toLowerCase().trim();
valToMatchAgainst = valToMatchAgainst.toLowerCase().trim();
if (matchingType === 'strict') {
return valToMatch === valToMatchAgainst;
}
if (matchingType === 'loose') {
return valToMatch.includes(valToMatchAgainst) || (valToMatch == valToMatchAgainst);
}
}
} catch (e) {
console.log(`error in matchValues: ${e.message}`);
return false;
}
return false;
};
function filterResults(resultsToFilter, filter, options) {
let filteredData = [];
// now we have a list of rows, we can filter by various things
return resultsToFilter.filter(item => {
let addRow = null;
let filterMatches = [];
if (typeof item === 'undefined' ||
item.length <= 0 ||
Object.keys(item).length <= 0) {
return false;
}
Object.keys(filter).forEach(key => {
const filterValue = filter[key]; // e.g. 'archaeology'
const itemValue = item[key]; // e.g. 'department' or 'undefined'
filterMatches.push(matchValues(itemValue, filterValue, options.matching || 'loose'));
});
if (options.operator === 'or') {
addRow = filterMatches.some(match => match === true);
}
if (options.operator === 'and') {
addRow = filterMatches.every(match => match === true);
}
return addRow;
});
}
function processGSheetResults(JSONResponse, returnAllResults, filter, filterOptions) {
const data = JSONResponse.feed.entry;
const startRow = 2; // skip the header row(1), don't need it
let processedResults = [{}];
let colNames = {};
for (let item of data) {
const cell = item['gs$cell']; // gets cell data
const val = cell['$t']; // gets cell value
const columnNum = cell['col']; // gets the col number
const thisRow = cell['row']; // gets the row number
const colNameToAdd = colNames[columnNum]; // careful, this will be undefined if we hit it on the first pass
// don't add this row to the return data, but add it to list of column names
if (thisRow < startRow) {
colNames[columnNum] = val.toLowerCase();
continue; // skip the header row
}
if (typeof processedResults[thisRow] === 'undefined') {
processedResults[thisRow] = {};
}
if (typeof colNameToAdd !== 'undefined' && colNameToAdd.length > 0) {
processedResults[thisRow][colNameToAdd] = val;
}
}
// make sure we're only returning valid, filled data items
processedResults = processedResults.filter(result => Object.keys(result).length);
// if we're not filtering, then | convert_sheet_to_bubble_list | identifier_name |
alumniMap-controller.js | results"]
if (results.length > 0){
result = results[0];
lat = parseFloat(result["geometry"]["lat"]);
lon = parseFloat(result["geometry"]["lng"]);
return [lat, lon];
} else {
throw new Error('No Results');
}
})
.catch( error => console.error('Error fetching lat & lon:', error) );
} catch (err) {
console.log(`opencagedata API error: ${err}`);
return {};
}
}
async function process_results(results) {
var name_list = {}
var organized_by_alumn = {};
var location_dict = {}
results.forEach(element => {
alumn = element[1];
year = element[2];
us = element[5];
timestamp = Date.parse(element[0]);
lat = element[9]
lon = element[10]
console.log(lat,lon)
area = element[7]
console.log(area)
if (us == "Yes"){
city = element[3];
state = element[4];
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
}else{
organized_by_alumn[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
if (element[7]) {
organized_by_alumn[alumn]["location"] = element[7]+", "+state;
console.log("area");
}
}
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp){
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}else{
name_list[alumn] = {
"location": city+", "+state,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
} else {
if (alumn in organized_by_alumn) {
if (organized_by_alumn[alumn]["timestamp"] < timestamp){
delete organized_by_alumn[alumn];
}
}
place = element[6];
if (alumn in name_list) {
if (name_list[alumn]["timestamp"] < timestamp) |
}else{
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
}
}
});
organized_by_location = {};
for (let [alumn, val] of Object.entries(organized_by_alumn)) {
console.log("Organized by location");
loc = val.location;
console.log(loc);
if(loc in location_dict){
//
}
else{
location_dict[loc] = [val.lat,val.lon]
}
year = val.year;
alumn_with_year = alumn + " '"+year;
if (loc in organized_by_location){
organized_by_location[loc].push(alumn_with_year);
}else{
organized_by_location[loc] = [alumn_with_year];
}
}
bubble_list = []
for ( let [loc, alumn_list] of Object.entries(organized_by_location)) {
// let geo = await get_lat_and_lon(loc+", US");
let lat = location_dict[loc][0];
let lon = location_dict[loc][1];
bubble_list.push({location: loc, latitude: lat,
longitude: lon, radius: 3, fillKey: 'bubble',
alumns: alumn_list});
}
all_alumni = []
for (let [alumn, val] of Object.entries(name_list)) {
loc = val.location;
year = val.year;
list_entry = alumn + " '"+year+" • "+loc;
all_alumni.push(list_entry);
}
return [bubble_list, all_alumni];
}
async function convert_sheet_to_bubble_list(id) {
const options = {
sheetId: id,
sheetNumber: 1,
returnAllResults: true
};
return await gsheetProcessor(options, process_results);
}
async function create_bubble_list() {
let id = "12lGrmIhj2dlLHt2GNucD69IktFoOA5k9Zi9rnLR0OoI";
let bubble_list = await convert_sheet_to_bubble_list(id);
return bubble_list;
}
function create_popup(location, alumns) {
alumns_html = 'Residents:'
alumns_html += '<br><ul class="popup">\n'
for (var i=0; i<alumns.length; i++){
alumn = alumns[i];
alumns_html += "<li>"+alumn+"</li>\n";
}
alumns_html += "</ul>"
return "<b>"+location+"</b>"+"<br>"+alumns_html;
}
function create_table(list) {
alumns_html = "<ul id='alumni' class='alumni' style='list-style: none;'><li>"
for (var i=0; i<list.length; i++){
item = list[i];
alumns_html += "<p>"+item+"</p>"
}
alumns_html += "</li></ul>"
return alumns_html;
}
//--------------------------------------------------------
/* Credit: https://github.com/bpk68/g-sheets-api */
/* Used with slight modifications */
//import GSheetsapi from './gsheetsapi.js';
function matchValues(valToMatch, valToMatchAgainst, matchingType) {
try {
if (typeof valToMatch != 'undefined') {
valToMatch = valToMatch.toLowerCase().trim();
valToMatchAgainst = valToMatchAgainst.toLowerCase().trim();
if (matchingType === 'strict') {
return valToMatch === valToMatchAgainst;
}
if (matchingType === 'loose') {
return valToMatch.includes(valToMatchAgainst) || (valToMatch == valToMatchAgainst);
}
}
} catch (e) {
console.log(`error in matchValues: ${e.message}`);
return false;
}
return false;
};
function filterResults(resultsToFilter, filter, options) {
let filteredData = [];
// now we have a list of rows, we can filter by various things
return resultsToFilter.filter(item => {
let addRow = null;
let filterMatches = [];
if (typeof item === 'undefined' ||
item.length <= 0 ||
Object.keys(item).length <= 0) {
return false;
}
Object.keys(filter).forEach(key => {
const filterValue = filter[key]; // e.g. 'archaeology'
const itemValue = item[key]; // e.g. 'department' or 'undefined'
filterMatches.push(matchValues(itemValue, filterValue, options.matching || 'loose'));
});
if (options.operator === 'or') {
addRow = filterMatches.some(match => match === true);
}
if (options.operator === 'and') {
addRow = filterMatches.every(match => match === true);
}
return addRow;
});
}
function processGSheetResults(JSONResponse, returnAllResults, filter, filterOptions) {
const data = JSONResponse.feed.entry;
const startRow = 2; // skip the header row(1), don't need it
let processedResults = [{}];
let colNames = {};
for (let item of data) {
const cell = item['gs$cell']; // gets cell data
const val = cell['$t']; // gets cell value
const columnNum = cell['col']; // gets the col number
const thisRow = cell['row']; // gets the row number
const colNameToAdd = colNames[columnNum]; // careful, this will be undefined if we hit it on the first pass
// don't add this row to the return data, but add it to list of column names
if (thisRow < startRow) {
colNames[columnNum] = val.toLowerCase();
continue; // skip the header row
}
if (typeof processedResults[thisRow] === 'undefined') {
processedResults[thisRow] = {};
}
if (typeof colNameToAdd !== 'undefined' && colNameToAdd.length > 0) {
processedResults[thisRow][colNameToAdd] = val;
}
}
// make sure we're only returning valid, filled data items
processedResults = processedResults.filter(result => Object.keys(result).length);
// if we're not filtering, | {
name_list[alumn] = {
"location": place,
"timestamp": timestamp,
"year": year,
"lat": lat,
"lon":lon
};
} | conditional_block |
db.go | merge attempts, giving up")
}
func mkCommit(r *git.Repository, refname string, msg string, tree *git.Tree, parent *git.Commit, extraParents ...*git.Commit) (*git.Commit, error) {
var parents []*git.Commit
if parent != nil {
parents = append(parents, parent)
}
if len(extraParents) > 0 {
parents = append(parents, extraParents...)
}
id, err := r.CreateCommit(
refname,
&git.Signature{"libpack", "libpack", time.Now()}, // author
&git.Signature{"libpack", "libpack", time.Now()}, // committer
msg,
tree, // git tree to commit
parents...,
)
if err != nil {
return nil, err
}
return lookupCommit(r, id)
}
func isGitConcurrencyErr(err error) bool {
gitErr, ok := err.(*git.GitError)
if !ok {
return false
}
return gitErr.Class == 11 && gitErr.Code == -15
}
func isGitIterOver(err error) bool {
gitErr, ok := err.(*git.GitError)
if !ok {
return false
}
return gitErr.Code == git.ErrIterOver
}
// Pull downloads objects at the specified url and remote ref name,
// and updates the local ref of db.
// The uncommitted tree is left unchanged (ie uncommitted changes are
// not merged or rebased).
func (db *DB) Pull(url, ref string) error {
if ref == "" {
ref = db.ref
}
refspec := fmt.Sprintf("%s:%s", ref, db.ref)
fmt.Printf("Creating anonymous remote url=%s refspec=%s\n", url, refspec)
remote, err := db.repo.CreateAnonymousRemote(url, refspec)
if err != nil {
return err
}
defer remote.Free()
if err := remote.Fetch(nil, nil, fmt.Sprintf("libpack.pull %s %s", url, refspec)); err != nil {
return err
}
return db.Update()
}
// Push uploads the committed contents of the db at the specified url and
// remote ref name. The remote ref is created if it doesn't exist.
func (db *DB) Push(url, ref string) error {
if ref == "" {
ref = db.ref
}
// The '+' prefix sets force=true,
// so the remote ref is created if it doesn't exist.
refspec := fmt.Sprintf("+%s:%s", db.ref, ref)
remote, err := db.repo.CreateAnonymousRemote(url, refspec)
if err != nil {
return err
}
defer remote.Free()
push, err := remote.NewPush()
if err != nil {
return fmt.Errorf("git_push_new: %v", err)
}
defer push.Free()
if err := push.AddRefspec(refspec); err != nil {
return fmt.Errorf("git_push_refspec_add: %v", err)
}
if err := push.Finish(); err != nil {
return fmt.Errorf("git_push_finish: %v", err)
}
return nil
}
// Checkout populates the directory at dir with the committed
// contents of db. Uncommitted changes are ignored.
//
// As a convenience, if dir is an empty string, a temporary directory
// is created and returned, and the caller is responsible for removing it.
//
func (db *DB) Checkout(dir string) (checkoutDir string, err error) {
if db.parent != nil {
return db.parent.Checkout(path.Join(db.scope, dir))
}
head := db.Head()
if head == nil {
return "", fmt.Errorf("no head to checkout")
}
if dir == "" {
dir, err = ioutil.TempDir("", "libpack-checkout-")
if err != nil {
return "", err
}
defer func() {
if err != nil {
os.RemoveAll(dir)
}
}()
}
stderr := new(bytes.Buffer)
args := []string{
"--git-dir", db.repo.Path(), "--work-tree", dir,
"checkout", head.String(), ".",
}
cmd := exec.Command("git", args...)
cmd.Stderr = stderr
if err := cmd.Run(); err != nil {
return "", fmt.Errorf("%s", stderr.String())
}
// FIXME: enforce scoping in the git checkout command instead
// of here.
d := path.Join(dir, db.scope)
fmt.Printf("--> %s\n", d)
return d, nil
}
// Checkout populates the directory at dir with the uncommitted
// contents of db.
// FIXME: this does not work properly at the moment.
func (db *DB) CheckoutUncommitted(dir string) error {
if db.tree == nil {
return fmt.Errorf("no tree")
}
tree, err := TreeScope(db.repo, db.tree, db.scope)
if err != nil {
return err
}
// If the tree is empty, checkout will fail and there is
// nothing to do anyway
if tree.EntryCount() == 0 {
return nil
}
idx, err := ioutil.TempFile("", "libpack-index")
if err != nil {
return err
}
defer os.RemoveAll(idx.Name())
readTree := exec.Command(
"git",
"--git-dir", db.repo.Path(),
"--work-tree", dir,
"read-tree", tree.Id().String(),
)
readTree.Env = append(readTree.Env, "GIT_INDEX_FILE="+idx.Name())
stderr := new(bytes.Buffer)
readTree.Stderr = stderr
if err := readTree.Run(); err != nil {
return fmt.Errorf("%s", stderr.String())
}
checkoutIndex := exec.Command(
"git",
"--git-dir", db.repo.Path(),
"--work-tree", dir,
"checkout-index",
)
checkoutIndex.Env = append(checkoutIndex.Env, "GIT_INDEX_FILE="+idx.Name())
stderr = new(bytes.Buffer)
checkoutIndex.Stderr = stderr
if err := checkoutIndex.Run(); err != nil {
return fmt.Errorf("%s", stderr.String())
}
return nil
}
// ExecInCheckout checks out the committed contents of the database into a
// temporary directory, executes the specified command in a new subprocess
// with that directory as the working directory, then removes the directory.
//
// The standard input, output and error streams of the command are the same
// as the current process's.
func (db *DB) ExecInCheckout(path string, args ...string) error {
checkout, err := db.Checkout("")
if err != nil {
return fmt.Errorf("checkout: %v", err)
}
defer os.RemoveAll(checkout)
cmd := exec.Command(path, args...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Dir = checkout
return cmd.Run()
}
// lookupTree looks up an object at hash `id` in `repo`, and returns
// it as a git tree. If the object is not a tree, an error is returned.
func (db *DB) lookupTree(id *git.Oid) (*git.Tree, error) {
return lookupTree(db.repo, id)
}
func lookupTree(r *git.Repository, id *git.Oid) (*git.Tree, error) {
obj, err := r.Lookup(id)
if err != nil {
return nil, err
}
if tree, ok := obj.(*git.Tree); ok {
return tree, nil
}
return nil, fmt.Errorf("hash %v exist but is not a tree", id)
}
// lookupBlob looks up an object at hash `id` in `repo`, and returns
// it as a git blob. If the object is not a blob, an error is returned.
func lookupBlob(r *git.Repository, id *git.Oid) (*git.Blob, error) {
obj, err := r.Lookup(id)
if err != nil {
return nil, err
}
if blob, ok := obj.(*git.Blob); ok {
return blob, nil
}
return nil, fmt.Errorf("hash %v exist but is not a blob", id)
}
// lookupTip looks up the object referenced by refname, and returns it
// as a Commit object. If the reference does not exist, or if object is
// not a commit, nil is returned. Other errors cannot be detected.
func lookupTip(r *git.Repository, refname string) *git.Commit {
ref, err := r.LookupReference(refname)
if err != nil {
return nil
}
commit, err := lookupCommit(r, ref.Target())
if err != nil {
return nil
}
return commit
}
// lookupCommit looks up an object at hash `id` in `repo`, and returns
// it as a git commit. If the object is not a commit, an error is returned.
func lookupCommit(r *git.Repository, id *git.Oid) (*git.Commit, error) {
obj, err := r.Lookup(id)
if err != nil {
return nil, err
}
if commit, ok := obj.(*git.Commit); ok {
return commit, nil
}
return nil, fmt.Errorf("hash %v exist but is not a commit", id)
}
// emptyTree creates an empty Git tree and returns its ID
// (the ID will always be the same)
func emptyTree(repo *git.Repository) (*git.Oid, error) | {
builder, err := repo.TreeBuilder()
if err != nil {
return nil, err
}
return builder.Write()
} | identifier_body |
|
db.go | of the Git blob at path `key`.
// If there is no blob at the specified key, an error
// is returned.
func (db *DB) Get(key string) (string, error) {
if db.parent != nil {
return db.parent.Get(path.Join(db.scope, key))
}
return TreeGet(db.repo, db.tree, path.Join(db.scope, key))
}
// Set writes the specified value in a Git blob, and updates the
// uncommitted tree to point to that blob as `key`.
func (db *DB) Set(key, value string) error {
if db.parent != nil {
return db.parent.Set(path.Join(db.scope, key), value)
}
p := NewPipeline(db.repo)
newTree, err := p.Base(db.tree).Set(path.Join(db.scope, key), value).Run()
if err != nil {
return err
}
db.tree = newTree
return nil
}
// SetStream writes the data from `src` to a new Git blob,
// and updates the uncommitted tree to point to that blob as `key`.
func (db *DB) SetStream(key string, src io.Reader) error {
// FIXME: instead of buffering the entire value, use
// libgit2 CreateBlobFromChunks to stream the data straight
// into git.
buf := new(bytes.Buffer)
_, err := io.Copy(buf, src)
if err != nil {
return err
}
return db.Set(key, buf.String())
}
func TreePath(p string) string {
p = path.Clean(p)
if p == "/" || p == "." {
return "/"
}
// Remove leading / from the path
// as libgit2.TreeEntryByPath does not accept it
p = strings.TrimLeft(p, "/")
return p
}
// List returns a list of object names at the subtree `key`.
// If there is no subtree at `key`, an error is returned.
func (db *DB) List(key string) ([]string, error) {
return TreeList(db.repo, db.tree, path.Join(db.scope, key))
}
// Commit atomically stores all database changes since the last commit
// into a new Git commit object, and updates the database's reference
// to point to that commit.
func (db *DB) Commit(msg string) error {
if db.parent != nil {
return db.parent.Commit(msg)
}
db.l.Lock()
defer db.l.Unlock()
if db.tree == nil {
// Nothing to commit
return nil
}
commit, err := CommitToRef(db.repo, db.tree, db.commit, db.ref, msg)
if err != nil {
return err
}
if db.commit != nil {
db.commit.Free()
}
db.commit = commit
return nil
}
func CommitToRef(r *git.Repository, tree *git.Tree, parent *git.Commit, refname, msg string) (*git.Commit, error) {
// Retry loop in case of conflict
// FIXME: use a custom inter-process lock as a first attempt for performance
var (
needMerge bool
tmpCommit *git.Commit
)
for {
if !needMerge {
// Create simple commit
commit, err := mkCommit(r, refname, msg, tree, parent)
if isGitConcurrencyErr(err) {
needMerge = true
continue
}
return commit, err
} else {
if tmpCommit == nil {
var err error
// Create a temporary intermediary commit, to pass to MergeCommits
// NOTE: this commit will not be part of the final history.
tmpCommit, err = mkCommit(r, "", msg, tree, parent)
if err != nil {
return nil, err
}
defer tmpCommit.Free()
}
// Lookup tip from ref
tip := lookupTip(r, refname)
if tip == nil {
// Ref may have been deleted after previous merge error
needMerge = false
continue
}
// Merge simple commit with the tip
opts, err := git.DefaultMergeOptions()
if err != nil {
return nil, err
}
idx, err := r.MergeCommits(tmpCommit, tip, &opts)
if err != nil {
return nil, err
}
conflicts, err := idx.ConflictIterator()
if err != nil {
return nil, err
}
defer conflicts.Free()
for {
c, err := conflicts.Next()
if isGitIterOver(err) {
break
} else if err != nil {
return nil, err
}
if c.Our != nil {
idx.RemoveConflict(c.Our.Path)
if err := idx.Add(c.Our); err != nil {
return nil, fmt.Errorf("error resolving merge conflict for '%s': %v", c.Our.Path, err)
}
}
}
mergedId, err := idx.WriteTreeTo(r)
if err != nil {
return nil, fmt.Errorf("WriteTree: %v", err)
}
mergedTree, err := lookupTree(r, mergedId)
if err != nil {
return nil, err
}
// Create new commit from merged tree (discarding simple commit)
commit, err := mkCommit(r, refname, msg, mergedTree, parent, tip)
if isGitConcurrencyErr(err) {
// FIXME: enforce a maximum number of retries to avoid infinite loops
continue
}
return commit, err
}
}
return nil, fmt.Errorf("too many failed merge attempts, giving up")
}
func mkCommit(r *git.Repository, refname string, msg string, tree *git.Tree, parent *git.Commit, extraParents ...*git.Commit) (*git.Commit, error) {
var parents []*git.Commit
if parent != nil {
parents = append(parents, parent)
}
if len(extraParents) > 0 {
parents = append(parents, extraParents...)
}
id, err := r.CreateCommit(
refname,
&git.Signature{"libpack", "libpack", time.Now()}, // author
&git.Signature{"libpack", "libpack", time.Now()}, // committer
msg,
tree, // git tree to commit
parents...,
)
if err != nil {
return nil, err
}
return lookupCommit(r, id)
}
func isGitConcurrencyErr(err error) bool {
gitErr, ok := err.(*git.GitError)
if !ok {
return false
}
return gitErr.Class == 11 && gitErr.Code == -15
}
func isGitIterOver(err error) bool {
gitErr, ok := err.(*git.GitError)
if !ok {
return false
}
return gitErr.Code == git.ErrIterOver
}
// Pull downloads objects at the specified url and remote ref name,
// and updates the local ref of db.
// The uncommitted tree is left unchanged (ie uncommitted changes are
// not merged or rebased).
func (db *DB) Pull(url, ref string) error {
if ref == "" {
ref = db.ref
}
refspec := fmt.Sprintf("%s:%s", ref, db.ref)
fmt.Printf("Creating anonymous remote url=%s refspec=%s\n", url, refspec)
remote, err := db.repo.CreateAnonymousRemote(url, refspec)
if err != nil {
return err
}
defer remote.Free()
if err := remote.Fetch(nil, nil, fmt.Sprintf("libpack.pull %s %s", url, refspec)); err != nil {
return err
}
return db.Update()
}
// Push uploads the committed contents of the db at the specified url and
// remote ref name. The remote ref is created if it doesn't exist.
func (db *DB) Push(url, ref string) error {
if ref == "" {
ref = db.ref
}
// The '+' prefix sets force=true,
// so the remote ref is created if it doesn't exist.
refspec := fmt.Sprintf("+%s:%s", db.ref, ref)
remote, err := db.repo.CreateAnonymousRemote(url, refspec)
if err != nil {
return err
}
defer remote.Free()
push, err := remote.NewPush()
if err != nil {
return fmt.Errorf("git_push_new: %v", err)
}
defer push.Free()
if err := push.AddRefspec(refspec); err != nil {
return fmt.Errorf("git_push_refspec_add: %v", err)
}
if err := push.Finish(); err != nil {
return fmt.Errorf("git_push_finish: %v", err)
}
return nil
}
// Checkout populates the directory at dir with the committed
// contents of db. Uncommitted changes are ignored.
//
// As a convenience, if dir is an empty string, a temporary directory
// is created and returned, and the caller is responsible for removing it.
//
func (db *DB) Checkout(dir string) (checkoutDir string, err error) {
if db.parent != nil {
return db.parent.Checkout(path.Join(db.scope, dir))
}
head := db.Head()
if head == nil {
return "", fmt.Errorf("no head to checkout")
} | if dir == "" {
dir, err = ioutil.TempDir("", "libpack-checkout-")
if err != nil {
return "", err
} | random_line_split |
|
db.go |
return db, nil
}
func newRepo(repo *git.Repository, ref string) (*DB, error) {
db := &DB{
repo: repo,
ref: ref,
}
if err := db.Update(); err != nil {
db.Free()
return nil, err
}
return db, nil
}
// Free must be called to release resources when a database is no longer
// in use.
// This is required in addition to Golang garbage collection, because
// of the libgit2 C bindings.
func (db *DB) Free() {
db.l.Lock()
db.repo.Free()
if db.commit != nil {
db.commit.Free()
}
db.l.Unlock()
}
// Head returns the id of the latest commit
func (db *DB) Head() *git.Oid {
db.l.RLock()
defer db.l.RUnlock()
if db.commit != nil {
return db.commit.Id()
}
return nil
}
func (db *DB) Latest() *git.Oid {
if db.tree != nil {
return db.tree.Id()
}
return nil
}
func (db *DB) Repo() *git.Repository {
return db.repo
}
func (db *DB) Tree() (*git.Tree, error) {
return TreeScope(db.repo, db.tree, db.scope)
}
func (db *DB) Dump(dst io.Writer) error {
return TreeDump(db.repo, db.tree, path.Join(db.scope, "/"), dst)
}
// AddDB copies the contents of src into db at prefix key.
// The resulting content is the union of the new tree and
// the pre-existing tree, if any.
// In case of a conflict, the content of the new tree wins.
// Conflicts are resolved at the file granularity (content is
// never merged).
func (db *DB) AddDB(key string, src *DB) error {
// No tree to add, nothing to do
src.l.RLock()
defer src.l.RUnlock()
if src.tree == nil {
return nil
}
return db.Add(key, src.tree.Id())
}
func (db *DB) Add(key string, obj interface{}) error {
db.l.Lock()
defer db.l.Unlock()
if db.parent != nil {
return db.parent.Add(path.Join(db.scope, key), obj)
}
newTree, err := NewPipeline(db.repo).Base(db.tree).Add(key, obj, true).Run()
if err != nil {
return err
}
db.tree = newTree
return nil
}
func (db *DB) Walk(key string, h func(string, git.Object) error) error {
return TreeWalk(db.repo, db.tree, path.Join(db.scope, key), h)
}
// Update looks up the value of the database's reference, and changes
// the memory representation accordingly.
// If the committed tree is changed, then uncommitted changes are lost.
func (db *DB) Update() error {
db.l.Lock()
defer db.l.Unlock()
tip, err := db.repo.LookupReference(db.ref)
if err != nil {
db.commit = nil
return nil
}
// If we already have the latest commit, don't do anything
if db.commit != nil && db.commit.Id().Equal(tip.Target()) {
return nil
}
commit, err := lookupCommit(db.repo, tip.Target())
if err != nil {
return err
}
if db.commit != nil {
db.commit.Free()
}
db.commit = commit
if db.tree != nil {
db.tree.Free()
}
if commitTree, err := commit.Tree(); err != nil {
return err
} else {
db.tree = commitTree
}
return nil
}
// Mkdir adds an empty subtree at key if it doesn't exist.
func (db *DB) Mkdir(key string) error {
db.l.Lock()
defer db.l.Unlock()
if db.parent != nil {
return db.parent.Mkdir(path.Join(db.scope, key))
}
p := NewPipeline(db.repo)
newTree, err := p.Base(db.tree).Mkdir(path.Join(db.scope, key)).Run()
if err != nil {
return err
}
db.tree = newTree
return nil
}
// Get returns the value of the Git blob at path `key`.
// If there is no blob at the specified key, an error
// is returned.
func (db *DB) Get(key string) (string, error) {
if db.parent != nil {
return db.parent.Get(path.Join(db.scope, key))
}
return TreeGet(db.repo, db.tree, path.Join(db.scope, key))
}
// Set writes the specified value in a Git blob, and updates the
// uncommitted tree to point to that blob as `key`.
func (db *DB) Set(key, value string) error {
if db.parent != nil {
return db.parent.Set(path.Join(db.scope, key), value)
}
p := NewPipeline(db.repo)
newTree, err := p.Base(db.tree).Set(path.Join(db.scope, key), value).Run()
if err != nil {
return err
}
db.tree = newTree
return nil
}
// SetStream writes the data from `src` to a new Git blob,
// and updates the uncommitted tree to point to that blob as `key`.
func (db *DB) SetStream(key string, src io.Reader) error {
// FIXME: instead of buffering the entire value, use
// libgit2 CreateBlobFromChunks to stream the data straight
// into git.
buf := new(bytes.Buffer)
_, err := io.Copy(buf, src)
if err != nil {
return err
}
return db.Set(key, buf.String())
}
func TreePath(p string) string {
p = path.Clean(p)
if p == "/" || p == "." {
return "/"
}
// Remove leading / from the path
// as libgit2.TreeEntryByPath does not accept it
p = strings.TrimLeft(p, "/")
return p
}
// List returns a list of object names at the subtree `key`.
// If there is no subtree at `key`, an error is returned.
func (db *DB) List(key string) ([]string, error) {
return TreeList(db.repo, db.tree, path.Join(db.scope, key))
}
// Commit atomically stores all database changes since the last commit
// into a new Git commit object, and updates the database's reference
// to point to that commit.
func (db *DB) Commit(msg string) error {
if db.parent != nil {
return db.parent.Commit(msg)
}
db.l.Lock()
defer db.l.Unlock()
if db.tree == nil {
// Nothing to commit
return nil
}
commit, err := CommitToRef(db.repo, db.tree, db.commit, db.ref, msg)
if err != nil {
return err
}
if db.commit != nil {
db.commit.Free()
}
db.commit = commit
return nil
}
func CommitToRef(r *git.Repository, tree *git.Tree, parent *git.Commit, refname, msg string) (*git.Commit, error) {
// Retry loop in case of conflict
// FIXME: use a custom inter-process lock as a first attempt for performance
var (
needMerge bool
tmpCommit *git.Commit
)
for {
if !needMerge {
// Create simple commit
commit, err := mkCommit(r, refname, msg, tree, parent)
if isGitConcurrencyErr(err) {
needMerge = true
continue
}
return commit, err
} else {
if tmpCommit == nil {
var err error
// Create a temporary intermediary commit, to pass to MergeCommits
// NOTE: this commit will not be part of the final history.
tmpCommit, err = mkCommit(r, "", msg, tree, parent)
if err != nil {
return nil, err
}
defer tmpCommit.Free()
}
// Lookup tip from ref
tip := lookupTip(r, refname)
if tip == nil {
// Ref may have been deleted after previous merge error
needMerge = false
continue
}
// Merge simple commit with the tip
opts, err := git.DefaultMergeOptions()
if err != nil {
return nil, err
}
idx, err := r.MergeCommits(tmpCommit, tip, &opts)
if err != nil {
return nil, err
}
conflicts, err := idx.ConflictIterator()
if err != nil {
return nil, err
}
defer conflicts.Free()
for {
c, err := conflicts.Next()
if isGitIterOver(err) {
break
} else if err != nil {
return nil, err
}
if c.Our != nil {
idx.RemoveConflict(c.Our.Path)
if err := idx.Add(c.Our); err != nil {
return nil, fmt.Errorf("error resolving merge conflict for '%s': %v", c.Our.Path, err)
}
}
}
mergedId, err := idx.WriteTreeTo(r)
if err != nil {
return nil, fmt.Errorf("WriteTree: %v", err)
}
mergedTree, err := lookupTree(r, mergedId)
if err != nil {
return nil, err
}
// Create new commit from merged tree (discarding simple commit)
commit, err | {
return nil, err
} | conditional_block |
|
db.go | ()
if isGitIterOver(err) {
break
} else if err != nil {
return nil, err
}
if c.Our != nil {
idx.RemoveConflict(c.Our.Path)
if err := idx.Add(c.Our); err != nil {
return nil, fmt.Errorf("error resolving merge conflict for '%s': %v", c.Our.Path, err)
}
}
}
mergedId, err := idx.WriteTreeTo(r)
if err != nil {
return nil, fmt.Errorf("WriteTree: %v", err)
}
mergedTree, err := lookupTree(r, mergedId)
if err != nil {
return nil, err
}
// Create new commit from merged tree (discarding simple commit)
commit, err := mkCommit(r, refname, msg, mergedTree, parent, tip)
if isGitConcurrencyErr(err) {
// FIXME: enforce a maximum number of retries to avoid infinite loops
continue
}
return commit, err
}
}
return nil, fmt.Errorf("too many failed merge attempts, giving up")
}
func mkCommit(r *git.Repository, refname string, msg string, tree *git.Tree, parent *git.Commit, extraParents ...*git.Commit) (*git.Commit, error) {
var parents []*git.Commit
if parent != nil {
parents = append(parents, parent)
}
if len(extraParents) > 0 {
parents = append(parents, extraParents...)
}
id, err := r.CreateCommit(
refname,
&git.Signature{"libpack", "libpack", time.Now()}, // author
&git.Signature{"libpack", "libpack", time.Now()}, // committer
msg,
tree, // git tree to commit
parents...,
)
if err != nil {
return nil, err
}
return lookupCommit(r, id)
}
func isGitConcurrencyErr(err error) bool {
gitErr, ok := err.(*git.GitError)
if !ok {
return false
}
return gitErr.Class == 11 && gitErr.Code == -15
}
func isGitIterOver(err error) bool {
gitErr, ok := err.(*git.GitError)
if !ok {
return false
}
return gitErr.Code == git.ErrIterOver
}
// Pull downloads objects at the specified url and remote ref name,
// and updates the local ref of db.
// The uncommitted tree is left unchanged (ie uncommitted changes are
// not merged or rebased).
func (db *DB) Pull(url, ref string) error {
if ref == "" {
ref = db.ref
}
refspec := fmt.Sprintf("%s:%s", ref, db.ref)
fmt.Printf("Creating anonymous remote url=%s refspec=%s\n", url, refspec)
remote, err := db.repo.CreateAnonymousRemote(url, refspec)
if err != nil {
return err
}
defer remote.Free()
if err := remote.Fetch(nil, nil, fmt.Sprintf("libpack.pull %s %s", url, refspec)); err != nil {
return err
}
return db.Update()
}
// Push uploads the committed contents of the db at the specified url and
// remote ref name. The remote ref is created if it doesn't exist.
func (db *DB) Push(url, ref string) error {
if ref == "" {
ref = db.ref
}
// The '+' prefix sets force=true,
// so the remote ref is created if it doesn't exist.
refspec := fmt.Sprintf("+%s:%s", db.ref, ref)
remote, err := db.repo.CreateAnonymousRemote(url, refspec)
if err != nil {
return err
}
defer remote.Free()
push, err := remote.NewPush()
if err != nil {
return fmt.Errorf("git_push_new: %v", err)
}
defer push.Free()
if err := push.AddRefspec(refspec); err != nil {
return fmt.Errorf("git_push_refspec_add: %v", err)
}
if err := push.Finish(); err != nil {
return fmt.Errorf("git_push_finish: %v", err)
}
return nil
}
// Checkout populates the directory at dir with the committed
// contents of db. Uncommitted changes are ignored.
//
// As a convenience, if dir is an empty string, a temporary directory
// is created and returned, and the caller is responsible for removing it.
//
func (db *DB) Checkout(dir string) (checkoutDir string, err error) {
if db.parent != nil {
return db.parent.Checkout(path.Join(db.scope, dir))
}
head := db.Head()
if head == nil {
return "", fmt.Errorf("no head to checkout")
}
if dir == "" {
dir, err = ioutil.TempDir("", "libpack-checkout-")
if err != nil {
return "", err
}
defer func() {
if err != nil {
os.RemoveAll(dir)
}
}()
}
stderr := new(bytes.Buffer)
args := []string{
"--git-dir", db.repo.Path(), "--work-tree", dir,
"checkout", head.String(), ".",
}
cmd := exec.Command("git", args...)
cmd.Stderr = stderr
if err := cmd.Run(); err != nil {
return "", fmt.Errorf("%s", stderr.String())
}
// FIXME: enforce scoping in the git checkout command instead
// of here.
d := path.Join(dir, db.scope)
fmt.Printf("--> %s\n", d)
return d, nil
}
// Checkout populates the directory at dir with the uncommitted
// contents of db.
// FIXME: this does not work properly at the moment.
func (db *DB) CheckoutUncommitted(dir string) error {
if db.tree == nil {
return fmt.Errorf("no tree")
}
tree, err := TreeScope(db.repo, db.tree, db.scope)
if err != nil {
return err
}
// If the tree is empty, checkout will fail and there is
// nothing to do anyway
if tree.EntryCount() == 0 {
return nil
}
idx, err := ioutil.TempFile("", "libpack-index")
if err != nil {
return err
}
defer os.RemoveAll(idx.Name())
readTree := exec.Command(
"git",
"--git-dir", db.repo.Path(),
"--work-tree", dir,
"read-tree", tree.Id().String(),
)
readTree.Env = append(readTree.Env, "GIT_INDEX_FILE="+idx.Name())
stderr := new(bytes.Buffer)
readTree.Stderr = stderr
if err := readTree.Run(); err != nil {
return fmt.Errorf("%s", stderr.String())
}
checkoutIndex := exec.Command(
"git",
"--git-dir", db.repo.Path(),
"--work-tree", dir,
"checkout-index",
)
checkoutIndex.Env = append(checkoutIndex.Env, "GIT_INDEX_FILE="+idx.Name())
stderr = new(bytes.Buffer)
checkoutIndex.Stderr = stderr
if err := checkoutIndex.Run(); err != nil {
return fmt.Errorf("%s", stderr.String())
}
return nil
}
// ExecInCheckout checks out the committed contents of the database into a
// temporary directory, executes the specified command in a new subprocess
// with that directory as the working directory, then removes the directory.
//
// The standard input, output and error streams of the command are the same
// as the current process's.
func (db *DB) ExecInCheckout(path string, args ...string) error {
checkout, err := db.Checkout("")
if err != nil {
return fmt.Errorf("checkout: %v", err)
}
defer os.RemoveAll(checkout)
cmd := exec.Command(path, args...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Dir = checkout
return cmd.Run()
}
// lookupTree looks up an object at hash `id` in `repo`, and returns
// it as a git tree. If the object is not a tree, an error is returned.
func (db *DB) lookupTree(id *git.Oid) (*git.Tree, error) {
return lookupTree(db.repo, id)
}
func lookupTree(r *git.Repository, id *git.Oid) (*git.Tree, error) {
obj, err := r.Lookup(id)
if err != nil {
return nil, err
}
if tree, ok := obj.(*git.Tree); ok {
return tree, nil
}
return nil, fmt.Errorf("hash %v exist but is not a tree", id)
}
// lookupBlob looks up an object at hash `id` in `repo`, and returns
// it as a git blob. If the object is not a blob, an error is returned.
func lookupBlob(r *git.Repository, id *git.Oid) (*git.Blob, error) {
obj, err := r.Lookup(id)
if err != nil {
return nil, err
}
if blob, ok := obj.(*git.Blob); ok {
return blob, nil
}
return nil, fmt.Errorf("hash %v exist but is not a blob", id)
}
// lookupTip looks up the object referenced by refname, and returns it
// as a Commit object. If the reference does not exist, or if object is
// not a commit, nil is returned. Other errors cannot be detected.
func | lookupTip | identifier_name |
|
file_hook.rs | 0x00, 0x00, 0x00, 0x00, 0x02, 0x10];
static DUMMY_SKINS: &[u8] = br#"{"skins":[]}"#;
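/// Returns a minimal stand-in for HD-only assets so the real file never has to
/// be opened (e.g. any `anim/*.anim` path gets `DUMMY_ANIM`, `*.dds.vr4` gets
/// `DUMMY_DDSGRP`). Returns `None` for files whose real contents are needed.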
fn check_dummied_out_hd(path: &[u8]) -> Option<&'static [u8]> {
if path.ends_with(b".anim") {
// Avoid touching tileset/foliage.anim
if path.starts_with(b"anim/") {
return Some(DUMMY_ANIM);
}
} else if path.ends_with(b".dds") {
// Font dds files are used (only) in SD, but they aren't loaded
// on file param SD.
if !path.starts_with(b"font/") {
// Anim happens to have a dds inside it :)
let dummy_dds = &DUMMY_ANIM[0x174..];
return Some(dummy_dds);
}
} else if path.ends_with(b".dds.vr4") {
return Some(DUMMY_DDSGRP);
} else if path.ends_with(b".dds.grp") {
// Avoid tileset.dds.grps, they need their frames
if path.starts_with(b"unit/") || path.starts_with(b"effect/") {
return Some(DUMMY_DDSGRP);
}
} else if path == b"anim/skins.json" {
return Some(DUMMY_SKINS);
}
None
}
/// If `params` has a file extension set, it will override whatever
/// extension `path` has.
///
/// Why it is done like that, I have no idea.
///
/// This function also normalizes to ascii lowercase and replaces any '\\' with '/'
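///
/// Illustrative example (hypothetical inputs): path `unit\\Main.DDS` with
/// `params.extension` pointing at `".anim"` comes out as `unit/main.anim`.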
unsafe fn real_path<'a>(
path: *const u8,
params: *const scr::OpenParams,
buffer: &'a mut ArrayVec<[u8; 256]>,
) -> Option<&'a [u8]> {
let c_path = CStr::from_ptr(path as *const i8);
let c_path = c_path.to_bytes();
let alt_extension = if (*params).extension.is_null() {
None
} else {
Some(CStr::from_ptr((*params).extension as *const i8))
};
let c_path_for_switched_extension = match alt_extension.is_some() {
true => match c_path.iter().rev().position(|&x| x == b'.') {
Some(period) => &c_path[..c_path.len() - period - 1],
None => c_path,
},
false => c_path,
};
if let Err(_) = buffer.try_extend_from_slice(c_path_for_switched_extension) {
return None;
}
if let Some(ext) = alt_extension {
if let Err(_) = buffer.try_extend_from_slice(ext.to_bytes()) {
return None;
}
}
let slice = &mut buffer[..];
for val in slice.iter_mut() {
match *val {
b'A' ..= b'Z' => {
*val = b'a' + (*val - b'A');
}
b'\\' => {
*val = b'/';
}
_ => (),
}
}
Some(slice)
}
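/// Wraps a static in-memory buffer in a BW-compatible `FileHandle`: the boxed
/// `FileAllocation` owns the read/peek/metadata objects the handle points at,
/// and is reclaimed later through `close_callback` (see `close_file`).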
unsafe fn memory_buffer_to_bw_file_handle(buffer: &'static [u8], handle: *mut scr::FileHandle) {
let inner = Box::new(FileAllocation {
file: FileState {
buffer,
pos: 0,
},
read: scr::FileRead {
vtable: &*FILE_READ_VTABLE,
inner: null_mut(),
},
peek: scr::FilePeek {
vtable: &*FILE_PEEK_VTABLE,
inner: null_mut(),
},
metadata: scr::FileMetadata {
vtable: &*FILE_METADATA_VTABLE,
inner: null_mut(),
},
});
let inner_ptr = Box::into_raw(inner);
(*inner_ptr).metadata.inner = inner_ptr as *mut c_void;
(*inner_ptr).peek.inner = inner_ptr as *mut c_void;
(*inner_ptr).read.inner = inner_ptr as *mut c_void;
let close_callback = scr::Function {
vtable: &*FUNCTION_VTABLE,
inner: inner_ptr as *mut c_void,
};
*handle = scr::FileHandle {
vtable: &*FILE_HANDLE_VTABLE1,
vtable2: &*FILE_HANDLE_VTABLE2,
vtable3: &*FILE_HANDLE_VTABLE3,
metadata: &mut (*inner_ptr).metadata,
peek: &mut (*inner_ptr).peek,
read: &mut (*inner_ptr).read,
file_ok: 1,
close_callback,
};
}
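/// Heap allocation backing one fake file handle; BW only ever sees raw
/// pointers into this struct via the vtable objects it contains.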
struct FileAllocation {
file: FileState,
read: scr::FileRead,
peek: scr::FilePeek,
metadata: scr::FileMetadata,
}
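/// Read cursor over the static buffer that stands in for the file's contents.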
struct FileState {
buffer: &'static [u8],
pos: u32,
}
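// These statics mimic the C++ vtable layouts SC:R expects to find behind the
// handle objects; `safety_padding` presumably keeps calls through unexpected
// extra vtable slots from reading out of bounds.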
lazy_static! {
static ref FILE_HANDLE_VTABLE1: scr::V_FileHandle1 = scr::V_FileHandle1 {
destroy: Thiscall::new(file_handle_destroy_nop),
read: Thiscall::new(read_file_wrap),
skip: Thiscall::new(skip_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_HANDLE_VTABLE2: scr::V_FileHandle2 = scr::V_FileHandle2 {
unk0: [0; 1],
peek: Thiscall::new(peek_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_HANDLE_VTABLE3: scr::V_FileHandle3 = scr::V_FileHandle3 {
unk0: [0; 1],
tell: Thiscall::new(tell_wrap),
seek: Thiscall::new(seek_wrap),
file_size: Thiscall::new(file_size_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_METADATA_VTABLE: scr::V_FileMetadata = scr::V_FileMetadata {
unk0: [0; 1],
tell: Thiscall::new(tell),
seek: Thiscall::new(seek),
file_size: Thiscall::new(file_size),
safety_padding: [0; 0x20],
};
static ref FILE_READ_VTABLE: scr::V_FileRead = scr::V_FileRead {
destroy: 0,
read: Thiscall::new(read_file),
skip: Thiscall::new(skip),
safety_padding: [0; 0x20],
};
static ref FILE_PEEK_VTABLE: scr::V_FilePeek = scr::V_FilePeek {
destroy: 0,
peek: Thiscall::new(peek),
safety_padding: [0; 0x20],
};
static ref FUNCTION_VTABLE: scr::V_Function = scr::V_Function {
destroy_inner: Thiscall::new(function_nop_destory),
invoke: Thiscall::new(close_file),
get_sizes: Thiscall::new(function_object_size),
copy: Thiscall::new(function_copy),
copy2: Thiscall::new(function_copy),
safety_padding: [0; 0x20],
};
}
unsafe extern fn file_handle_destroy_nop(_file: *mut scr::FileHandle, _dyn_free: u32) {
}
unsafe extern fn function_nop_destory(_file: *mut scr::Function, _unk: u32) {
}
unsafe extern fn function_object_size(
_file: *mut scr::Function,
size: *mut u32,
) {
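// Writes what appears to be the object's size (0xc), alignment (0x4) and an
// inline-storage flag (0x1); exact meaning inferred from BW's copy routine.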
*size = 0xc;
*size.add(1) = 0x4;
*(size.add(2) as *mut u8) = 0x1;
}
unsafe extern fn function_copy(this: *mut scr::Function, other: *mut scr::Function) {
*other = *this;
}
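// The *_wrap functions are the entry points BW calls through the FileHandle
// vtables; they simply forward to the inner FileRead/FilePeek/FileMetadata
// objects' own vtables.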
unsafe extern fn read_file_wrap(file: *mut scr::FileHandle, out: *mut u8, size: u32) -> u32 {
let read = (*file).read;
let vtable = (*read).vtable;
(*vtable).read.call3(read, out, size)
}
unsafe extern fn skip_wrap(file: *mut scr::FileHandle, size: u32) |
unsafe extern fn read_file(file: *mut scr::FileRead, out: *mut u8, size: u32) -> u32 {
let file = (*file).inner as *mut FileAllocation;
let buf = std::slice::from_raw_parts_mut(out, size as usize);
(*file).file.read(buf)
}
unsafe extern fn skip(file: *mut scr::FileRead, size: u32) {
let file = (*file).inner as *mut FileAllocation;
let pos = (*file).file.tell();
(*file).file.seek(pos.saturating_add(size));
}
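// Called through `vtable2`, so the pointer BW passes is offset 4 bytes into
// the FileHandle; step back to recover the full struct.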
unsafe extern fn peek_wrap(file: *mut c_void, out: *mut u8, size: u32) -> u32 {
let file = (file as usize - 4) as *mut scr::FileHandle;
let peek = (*file). | {
let read = (*file).read;
let vtable = (*read).vtable;
(*vtable).skip.call2(read, size)
} | identifier_body |
file_hook.rs | 0x00, 0x00, 0x00, 0x00, 0x02, 0x10];
static DUMMY_SKINS: &[u8] = br#"{"skins":[]}"#;
fn check_dummied_out_hd(path: &[u8]) -> Option<&'static [u8]> {
if path.ends_with(b".anim") {
// Avoid touching tileset/foliage.anim
if path.starts_with(b"anim/") {
return Some(DUMMY_ANIM);
}
} else if path.ends_with(b".dds") {
// Font dds files are used (only) in SD, but they aren't loaded
// on file param SD.
if !path.starts_with(b"font/") {
// Anim happens to have a dds inside it :)
let dummy_dds = &DUMMY_ANIM[0x174..];
return Some(dummy_dds);
}
} else if path.ends_with(b".dds.vr4") {
return Some(DUMMY_DDSGRP);
} else if path.ends_with(b".dds.grp") {
// Avoid tileset.dds.grps, they need their frames
if path.starts_with(b"unit/") || path.starts_with(b"effect/") {
return Some(DUMMY_DDSGRP);
}
} else if path == b"anim/skins.json" {
return Some(DUMMY_SKINS);
}
None
}
/// If `params` has a file extension set, it will override whatever
/// extension `path` has.
///
/// Why it is done like that, I have no idea.
///
/// This function also normalizes to ascii lowercase and replaces any '\\' with '/'
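///
/// Illustrative example (hypothetical inputs): path `unit\\Main.DDS` with
/// `params.extension` pointing at `".anim"` comes out as `unit/main.anim`.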
unsafe fn real_path<'a>(
path: *const u8,
params: *const scr::OpenParams,
buffer: &'a mut ArrayVec<[u8; 256]>,
) -> Option<&'a [u8]> {
let c_path = CStr::from_ptr(path as *const i8);
let c_path = c_path.to_bytes();
let alt_extension = if (*params).extension.is_null() {
None
} else {
Some(CStr::from_ptr((*params).extension as *const i8))
};
| true => match c_path.iter().rev().position(|&x| x == b'.') {
Some(period) => &c_path[..c_path.len() - period - 1],
None => c_path,
},
false => c_path,
};
if let Err(_) = buffer.try_extend_from_slice(c_path_for_switched_extension) {
return None;
}
if let Some(ext) = alt_extension {
if let Err(_) = buffer.try_extend_from_slice(ext.to_bytes()) {
return None;
}
}
let slice = &mut buffer[..];
for val in slice.iter_mut() {
match *val {
b'A' ..= b'Z' => {
*val = b'a' + (*val - b'A');
}
b'\\' => {
*val = b'/';
}
_ => (),
}
}
Some(slice)
}
unsafe fn memory_buffer_to_bw_file_handle(buffer: &'static [u8], handle: *mut scr::FileHandle) {
let inner = Box::new(FileAllocation {
file: FileState {
buffer,
pos: 0,
},
read: scr::FileRead {
vtable: &*FILE_READ_VTABLE,
inner: null_mut(),
},
peek: scr::FilePeek {
vtable: &*FILE_PEEK_VTABLE,
inner: null_mut(),
},
metadata: scr::FileMetadata {
vtable: &*FILE_METADATA_VTABLE,
inner: null_mut(),
},
});
let inner_ptr = Box::into_raw(inner);
(*inner_ptr).metadata.inner = inner_ptr as *mut c_void;
(*inner_ptr).peek.inner = inner_ptr as *mut c_void;
(*inner_ptr).read.inner = inner_ptr as *mut c_void;
let close_callback = scr::Function {
vtable: &*FUNCTION_VTABLE,
inner: inner_ptr as *mut c_void,
};
*handle = scr::FileHandle {
vtable: &*FILE_HANDLE_VTABLE1,
vtable2: &*FILE_HANDLE_VTABLE2,
vtable3: &*FILE_HANDLE_VTABLE3,
metadata: &mut (*inner_ptr).metadata,
peek: &mut (*inner_ptr).peek,
read: &mut (*inner_ptr).read,
file_ok: 1,
close_callback,
};
}
struct FileAllocation {
file: FileState,
read: scr::FileRead,
peek: scr::FilePeek,
metadata: scr::FileMetadata,
}
struct FileState {
buffer: &'static [u8],
pos: u32,
}
lazy_static! {
static ref FILE_HANDLE_VTABLE1: scr::V_FileHandle1 = scr::V_FileHandle1 {
destroy: Thiscall::new(file_handle_destroy_nop),
read: Thiscall::new(read_file_wrap),
skip: Thiscall::new(skip_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_HANDLE_VTABLE2: scr::V_FileHandle2 = scr::V_FileHandle2 {
unk0: [0; 1],
peek: Thiscall::new(peek_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_HANDLE_VTABLE3: scr::V_FileHandle3 = scr::V_FileHandle3 {
unk0: [0; 1],
tell: Thiscall::new(tell_wrap),
seek: Thiscall::new(seek_wrap),
file_size: Thiscall::new(file_size_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_METADATA_VTABLE: scr::V_FileMetadata = scr::V_FileMetadata {
unk0: [0; 1],
tell: Thiscall::new(tell),
seek: Thiscall::new(seek),
file_size: Thiscall::new(file_size),
safety_padding: [0; 0x20],
};
static ref FILE_READ_VTABLE: scr::V_FileRead = scr::V_FileRead {
destroy: 0,
read: Thiscall::new(read_file),
skip: Thiscall::new(skip),
safety_padding: [0; 0x20],
};
static ref FILE_PEEK_VTABLE: scr::V_FilePeek = scr::V_FilePeek {
destroy: 0,
peek: Thiscall::new(peek),
safety_padding: [0; 0x20],
};
static ref FUNCTION_VTABLE: scr::V_Function = scr::V_Function {
destroy_inner: Thiscall::new(function_nop_destory),
invoke: Thiscall::new(close_file),
get_sizes: Thiscall::new(function_object_size),
copy: Thiscall::new(function_copy),
copy2: Thiscall::new(function_copy),
safety_padding: [0; 0x20],
};
}
unsafe extern fn file_handle_destroy_nop(_file: *mut scr::FileHandle, _dyn_free: u32) {
}
unsafe extern fn function_nop_destory(_file: *mut scr::Function, _unk: u32) {
}
unsafe extern fn function_object_size(
_file: *mut scr::Function,
size: *mut u32,
) {
*size = 0xc;
*size.add(1) = 0x4;
*(size.add(2) as *mut u8) = 0x1;
}
unsafe extern fn function_copy(this: *mut scr::Function, other: *mut scr::Function) {
*other = *this;
}
unsafe extern fn read_file_wrap(file: *mut scr::FileHandle, out: *mut u8, size: u32) -> u32 {
let read = (*file).read;
let vtable = (*read).vtable;
(*vtable).read.call3(read, out, size)
}
unsafe extern fn skip_wrap(file: *mut scr::FileHandle, size: u32) {
let read = (*file).read;
let vtable = (*read).vtable;
(*vtable).skip.call2(read, size)
}
unsafe extern fn read_file(file: *mut scr::FileRead, out: *mut u8, size: u32) -> u32 {
let file = (*file).inner as *mut FileAllocation;
let buf = std::slice::from_raw_parts_mut(out, size as usize);
(*file).file.read(buf)
}
unsafe extern fn skip(file: *mut scr::FileRead, size: u32) {
let file = (*file).inner as *mut FileAllocation;
let pos = (*file).file.tell();
(*file).file.seek(pos.saturating_add(size));
}
unsafe extern fn peek_wrap(file: *mut c_void, out: *mut u8, size: u32) -> u32 {
let file = (file as usize - 4) as *mut scr::FileHandle;
let peek = (*file).peek | let c_path_for_switched_extension = match alt_extension.is_some() { | random_line_split |
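Editorial aside, not part of either file_hook.rs record: a minimal, self-contained sketch of the normalization that real_path's doc comment describes (ASCII lowercasing and '\' replaced with '/'), assuming nothing beyond plain byte slices; the function name and the sample path are placeholders, not the project's real API.

// Sketch only: mirrors the byte-wise normalization loop shown in real_path.
fn normalize_in_place(buf: &mut [u8]) {
    for b in buf.iter_mut() {
        match *b {
            b'A'..=b'Z' => *b = *b - b'A' + b'a',
            b'\\' => *b = b'/',
            _ => {}
        }
    }
}

fn main() {
    let mut path = *b"Anim\\Main_999.ANIM";
    normalize_in_place(&mut path);
    assert_eq!(&path[..], &b"anim/main_999.anim"[..]);
}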
file_hook.rs | 0x00, 0x00, 0x00, 0x00, 0x02, 0x10];
static DUMMY_SKINS: &[u8] = br#"{"skins":[]}"#;
fn check_dummied_out_hd(path: &[u8]) -> Option<&'static [u8]> {
if path.ends_with(b".anim") {
// Avoid touching tileset/foliage.anim
if path.starts_with(b"anim/") {
return Some(DUMMY_ANIM);
}
} else if path.ends_with(b".dds") {
// Font dds files are used (only) in SD, but they aren't loaded
// on file param SD.
if !path.starts_with(b"font/") {
// Anim happens to have a dds inside it :)
let dummy_dds = &DUMMY_ANIM[0x174..];
return Some(dummy_dds);
}
} else if path.ends_with(b".dds.vr4") {
return Some(DUMMY_DDSGRP);
} else if path.ends_with(b".dds.grp") {
// Avoid tileset.dds.grps, they need their frames
if path.starts_with(b"unit/") || path.starts_with(b"effect/") {
return Some(DUMMY_DDSGRP);
}
} else if path == b"anim/skins.json" {
return Some(DUMMY_SKINS);
}
None
}
/// If `params` has a file extension set, it will override whatever
/// extension `path` has.
///
/// Why it is done like that, I have no idea.
///
/// This function also normalizes to ascii lowercase and replaces any '\\' with '/'
unsafe fn real_path<'a>(
path: *const u8,
params: *const scr::OpenParams,
buffer: &'a mut ArrayVec<[u8; 256]>,
) -> Option<&'a [u8]> {
let c_path = CStr::from_ptr(path as *const i8);
let c_path = c_path.to_bytes();
let alt_extension = if (*params).extension.is_null() {
None
} else {
Some(CStr::from_ptr((*params).extension as *const i8))
};
let c_path_for_switched_extension = match alt_extension.is_some() {
true => match c_path.iter().rev().position(|&x| x == b'.') {
Some(period) => &c_path[..c_path.len() - period - 1],
None => c_path,
},
false => c_path,
};
if let Err(_) = buffer.try_extend_from_slice(c_path_for_switched_extension) {
return None;
}
if let Some(ext) = alt_extension {
if let Err(_) = buffer.try_extend_from_slice(ext.to_bytes()) {
return None;
}
}
let slice = &mut buffer[..];
for val in slice.iter_mut() {
match *val {
b'A' ..= b'Z' => {
*val = b'a' + (*val - b'A');
}
b'\\' => {
*val = b'/';
}
_ => (),
}
}
Some(slice)
}
unsafe fn | (buffer: &'static [u8], handle: *mut scr::FileHandle) {
let inner = Box::new(FileAllocation {
file: FileState {
buffer,
pos: 0,
},
read: scr::FileRead {
vtable: &*FILE_READ_VTABLE,
inner: null_mut(),
},
peek: scr::FilePeek {
vtable: &*FILE_PEEK_VTABLE,
inner: null_mut(),
},
metadata: scr::FileMetadata {
vtable: &*FILE_METADATA_VTABLE,
inner: null_mut(),
},
});
let inner_ptr = Box::into_raw(inner);
(*inner_ptr).metadata.inner = inner_ptr as *mut c_void;
(*inner_ptr).peek.inner = inner_ptr as *mut c_void;
(*inner_ptr).read.inner = inner_ptr as *mut c_void;
let close_callback = scr::Function {
vtable: &*FUNCTION_VTABLE,
inner: inner_ptr as *mut c_void,
};
*handle = scr::FileHandle {
vtable: &*FILE_HANDLE_VTABLE1,
vtable2: &*FILE_HANDLE_VTABLE2,
vtable3: &*FILE_HANDLE_VTABLE3,
metadata: &mut (*inner_ptr).metadata,
peek: &mut (*inner_ptr).peek,
read: &mut (*inner_ptr).read,
file_ok: 1,
close_callback,
};
}
struct FileAllocation {
file: FileState,
read: scr::FileRead,
peek: scr::FilePeek,
metadata: scr::FileMetadata,
}
struct FileState {
buffer: &'static [u8],
pos: u32,
}
lazy_static! {
static ref FILE_HANDLE_VTABLE1: scr::V_FileHandle1 = scr::V_FileHandle1 {
destroy: Thiscall::new(file_handle_destroy_nop),
read: Thiscall::new(read_file_wrap),
skip: Thiscall::new(skip_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_HANDLE_VTABLE2: scr::V_FileHandle2 = scr::V_FileHandle2 {
unk0: [0; 1],
peek: Thiscall::new(peek_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_HANDLE_VTABLE3: scr::V_FileHandle3 = scr::V_FileHandle3 {
unk0: [0; 1],
tell: Thiscall::new(tell_wrap),
seek: Thiscall::new(seek_wrap),
file_size: Thiscall::new(file_size_wrap),
safety_padding: [0; 0x20],
};
static ref FILE_METADATA_VTABLE: scr::V_FileMetadata = scr::V_FileMetadata {
unk0: [0; 1],
tell: Thiscall::new(tell),
seek: Thiscall::new(seek),
file_size: Thiscall::new(file_size),
safety_padding: [0; 0x20],
};
static ref FILE_READ_VTABLE: scr::V_FileRead = scr::V_FileRead {
destroy: 0,
read: Thiscall::new(read_file),
skip: Thiscall::new(skip),
safety_padding: [0; 0x20],
};
static ref FILE_PEEK_VTABLE: scr::V_FilePeek = scr::V_FilePeek {
destroy: 0,
peek: Thiscall::new(peek),
safety_padding: [0; 0x20],
};
static ref FUNCTION_VTABLE: scr::V_Function = scr::V_Function {
destroy_inner: Thiscall::new(function_nop_destory),
invoke: Thiscall::new(close_file),
get_sizes: Thiscall::new(function_object_size),
copy: Thiscall::new(function_copy),
copy2: Thiscall::new(function_copy),
safety_padding: [0; 0x20],
};
}
unsafe extern fn file_handle_destroy_nop(_file: *mut scr::FileHandle, _dyn_free: u32) {
}
unsafe extern fn function_nop_destory(_file: *mut scr::Function, _unk: u32) {
}
unsafe extern fn function_object_size(
_file: *mut scr::Function,
size: *mut u32,
) {
*size = 0xc;
*size.add(1) = 0x4;
*(size.add(2) as *mut u8) = 0x1;
}
unsafe extern fn function_copy(this: *mut scr::Function, other: *mut scr::Function) {
*other = *this;
}
unsafe extern fn read_file_wrap(file: *mut scr::FileHandle, out: *mut u8, size: u32) -> u32 {
let read = (*file).read;
let vtable = (*read).vtable;
(*vtable).read.call3(read, out, size)
}
unsafe extern fn skip_wrap(file: *mut scr::FileHandle, size: u32) {
let read = (*file).read;
let vtable = (*read).vtable;
(*vtable).skip.call2(read, size)
}
unsafe extern fn read_file(file: *mut scr::FileRead, out: *mut u8, size: u32) -> u32 {
let file = (*file).inner as *mut FileAllocation;
let buf = std::slice::from_raw_parts_mut(out, size as usize);
(*file).file.read(buf)
}
unsafe extern fn skip(file: *mut scr::FileRead, size: u32) {
let file = (*file).inner as *mut FileAllocation;
let pos = (*file).file.tell();
(*file).file.seek(pos.saturating_add(size));
}
unsafe extern fn peek_wrap(file: *mut c_void, out: *mut u8, size: u32) -> u32 {
let file = (file as usize - 4) as *mut scr::FileHandle;
let peek = (*file).peek | memory_buffer_to_bw_file_handle | identifier_name |
evaluate.ts | max = 6;
private localIds = [];
private serverIds = [];
private isweb = false;
private allowDelete: boolean = true;
private len = 0;
private imgUrl = [];
private WimgUrl = [];
constructor(public navCtrl: NavController,
public navParams: NavParams,
private httpService: HttpService,
private nativeService: NativeService,
private storage: Storage,
private viewCtrl: ViewController,
public alertCtrl: AlertController, ) {
this.goodslist = navParams.get("goodsDetail");
this.typenum = navParams.get("num");
this.goodsDetail = this.goodslist.Items[this.typenum];
this.storage.get("userinfo").then(val => {
// //console.info(val);
if (val != "") {
if (val.userinform.ID == "") {
this.navCtrl.push("LoginPage", { showflag: true });
} else {
this.userID = val.userinform.ID;
}
} else {
this.navCtrl.push("LoginPage", { showflag: true });
}
});
this.isweb = !this.nativeService.isMobile();
}
ionViewDidLoad() {
}
givestaar(i) {
this.list = [{ "statue": false }, { "statue": false }, { "statue": false }, { "statue": false }, { "statue": false },];
for (var n = 0; n < (i + 1); n++) {
this.list[n].statue = true;
};
this.num = (i + 1)
if (i == 0) {
this.status = "非常差";
} else if (i == 1) {
this.status = "差";
} else if (i == 2) {
this.status = "一般";
} else if (i == 3) {
this.status = "好";
} else if (i == 4) {
this.status = "非常好";
};
////console.info(i);
}
comment() {
if (this.textarea == '') {
this.nativeService.showToast("评论内容不能为空!");
return;
} else {
if (!this.isweb) {
if (this.fileObjList == null || this.fileObjList.length <= 0) {
this.login([]);
} else {
this.login(this.fileObjList);
// this.HaveImage = 1;
// var filearray: any[] = [];
// for (var i = 0; i < this.fileObjList.length; i++) {
// filearray.push(this.fileObjList[i].origPath);
// }
// this.nativeService.showLoading("提交中");
// this.nativeService.uploadFileByMutiTranser(filearray).then(val => {
// this.login(val);
// });
}
} else {
if (this.imgUrl == null || this.imgUrl.length <= 0) {
this.login([]);
} else {
this.login(this.imgUrl);
}
}
}
}
login(filearray: any[]) {
//console.info(this.fileObjList[0])
var userInfo = {
Userid: this.userID,
goodsID: this.goodsDetail.ProductID,
Detail: this.textarea,
anonymity: 0,
Degree: this.num,
orderID: this.goodslist.ID,
file: this.fileObjList,
HaveImage: this.HaveImage,
itemID: this.goodsDetail.itemID
};
//console.info(userInfo);
var parameter = new Parameter();
parameter.jyh = "1024";
parameter.parm = JSON.stringify(userInfo);
this.nativeService.showLoading("提交中");
this.httpService.post("/MobService/Index", parameter).map(res => res.json()).subscribe(res => {
this.nativeService.hideLoading();
var result = JSON.parse(res.result);
//console.info(result);
if (result.Code == 1) {
this.nativeService.showToast(result.Msg);
this.goodslist.Items[this.typenum].IsEvaluate = 1
this.viewCtrl.dismiss(this.goodslist);
} else {
this.nativeService.showToast(result.Msg);
}
});
}
slice(string) {
if (string != "" && string != null) {
return string.slice(0, 18) + "...";
}
}
closealert() {
let confirm = this.alertCtrl.create({
title: '是否放弃评价',
buttons: [
{
text: '否',
handler: () => {
//console.log('Disagree clicked');
}
},
{
text: '是',
handler: () => {
this.viewCtrl.dismiss();
}
}
]
});
confirm.present();
}
addWxPicture() {
var that = this;
wx.chooseImage({
count: that.max - that.len, // 默认9
sizeType: ['original', 'compressed'], // 可以指定是原图还是压缩图,默认二者都有
sourceType: ['album', 'camera'], // 可以指定来源是相册还是相机,默认二者都有
success: function (res) {
var localIds = res.localIds;
that.len = localIds.length; // 返回选定照片的本地ID列表,localId可以作为img标签的src属性显示图片
// for (var i = 0; i < localIds.length; i++) {
that.WxUpLoad(localIds);
// }
// wx.getLocalImgData({
// localId: localIds, // 图片的localID
// success: function (res) {
// var localData = res.localData;
// that.localIds = localData; // localData是图片的base64数据,可以用img标签显示
// }
// });
}
});
}
WxUpLoad(localIds: string[]) {
var that = this;
var localId = localIds.pop();
wx.uploadImage({
localId: localId,
success: function (res) {
var serverId = res.serverId;
that.serverIds.push(serverId);
//console.info("serverId》》》》》" + serverId)
if (localIds.length > 0) {
that.WxUpLoad(localIds);
} else {
// setTimeout(() => {
// that.input.setFocus();
// }, 500);
}
that.WxDownLoadImage(serverId);
// if (that.len == that.serverIds.length) {
// that.WxDownLoadImage(serverId);
// }
}
});
}
viewerWxPicture(item) {
alert(item);
var that = this;
wx.previewImage({
current: item, // 当前显示图片的http链接
urls: that.localIds // 需要预览的图片http链接列表
});
}
deleteWxPicture(i) {//删除照片
if (!this.allowDelete) {
return;
}
this.alertCtrl.create({
title: '确认删除?',
buttons: [{ text: '取消' },
{
text: '确定',
handler: () => {
// this.imgUrl.splice(i, 1);
// this.WimgUrl.splice(i, 1);
this.fileObjList.splice(i, 1);
// this.len--;
}
}
]
}).present();
}
WxDownLoadImage(serverId) {
this.nativeService.uploadWxFile(serverId).subscribe(res => {
var result = JSON.parse(res.result);
if (result.Code == "1") {
this.WimgUrl.push(fileViewPath + result.Msg);
this.imgUrl.push(result.Msg);
//console.info("成功" + this.WimgUrl)
| sole.info("不成功" + result)
});
}
getImg() {
if (!this.nativeService.isMobile()) {
this.addWxPicture();
}
}
onChange1(event: any) {
if(this.fileObjList.length<6){
}else{
this.nativeService.showToast("最多只能上传6张图片");
return
}
let files = event.target.files;
var file = files[0];
var size = files[0].size / 1024 > 1024 ? (~~(10 * files[0].size / 1024 / 1024)) / 10 + "MB" : ~~(files[0].size / 1024) + "KB";
this.nativeService.showLoading("获取中");
//console.info("大小" + size)
//console.info(file);
if (file == undefined) {
this.IDUrl = "";
//console.info("undefiend1")
} else {
//console.info("undefiend2")
let fileReader = new FileReader();
fileReader.readAsDataURL(file);
var that = this;
fileReader.onload = function (e) {
var name: any;
name = e.target
that.IDUrl = name.result;
//console.info(that.IDUrl);
that.fileObjList.push(that.IDUrl);
// that.takeimg(name.result, 1);
}
}
this.nativeService.hideLoading();
}
takeimg(result, num) {
var result = result;
var that = this;
var maxSize = 100 * | }
//con | identifier_name |
evaluate.ts | max = 6;
private localIds = [];
private serverIds = [];
private isweb = false;
private allowDelete: boolean = true;
private len = 0;
private imgUrl = [];
private WimgUrl = [];
constructor(public navCtrl: NavController,
public navParams: NavParams,
private httpService: HttpService,
private nativeService: NativeService,
private storage: Storage,
private viewCtrl: ViewController,
public alertCtrl: AlertController, ) {
this.goodslist = navParams.get("goodsDetail");
this.typenum = navParams.get("num");
this.goodsDetail = this.goodslist.Items[this.typenum];
this.storage.get("userinfo").then(val => {
// //console.info(val);
if (val != "") {
if (val.userinform.ID == "") {
this.navCtrl.push("LoginPage", { showflag: true });
} else {
this.userID = val.userinform.ID;
}
} else {
this.navCtrl.push("LoginPage", { showflag: true });
}
});
this.isweb = !this.nativeService.isMobile();
}
ionViewDidLoad() |
givestaar(i) {
this.list = [{ "statue": false }, { "statue": false }, { "statue": false }, { "statue": false }, { "statue": false },];
for (var n = 0; n < (i + 1); n++) {
this.list[n].statue = true;
};
this.num = (i + 1)
if (i == 0) {
this.status = "非常差";
} else if (i == 1) {
this.status = "差";
} else if (i == 2) {
this.status = "一般";
} else if (i == 3) {
this.status = "好";
} else if (i == 4) {
this.status = "非常好";
};
////console.info(i);
}
comment() {
if (this.textarea == '') {
this.nativeService.showToast("评论内容不能为空!");
return;
} else {
if (!this.isweb) {
if (this.fileObjList == null || this.fileObjList.length <= 0) {
this.login([]);
} else {
this.login(this.fileObjList);
// this.HaveImage = 1;
// var filearray: any[] = [];
// for (var i = 0; i < this.fileObjList.length; i++) {
// filearray.push(this.fileObjList[i].origPath);
// }
// this.nativeService.showLoading("提交中");
// this.nativeService.uploadFileByMutiTranser(filearray).then(val => {
// this.login(val);
// });
}
} else {
if (this.imgUrl == null || this.imgUrl.length <= 0) {
this.login([]);
} else {
this.login(this.imgUrl);
}
}
}
}
login(filearray: any[]) {
//console.info(this.fileObjList[0])
var userInfo = {
Userid: this.userID,
goodsID: this.goodsDetail.ProductID,
Detail: this.textarea,
anonymity: 0,
Degree: this.num,
orderID: this.goodslist.ID,
file: this.fileObjList,
HaveImage: this.HaveImage,
itemID: this.goodsDetail.itemID
};
//console.info(userInfo);
var parameter = new Parameter();
parameter.jyh = "1024";
parameter.parm = JSON.stringify(userInfo);
this.nativeService.showLoading("提交中");
this.httpService.post("/MobService/Index", parameter).map(res => res.json()).subscribe(res => {
this.nativeService.hideLoading();
var result = JSON.parse(res.result);
//console.info(result);
if (result.Code == 1) {
this.nativeService.showToast(result.Msg);
this.goodslist.Items[this.typenum].IsEvaluate = 1
this.viewCtrl.dismiss(this.goodslist);
} else {
this.nativeService.showToast(result.Msg);
}
});
}
slice(string) {
if (string != "" && string != null) {
return string.slice(0, 18) + "...";
}
}
closealert() {
let confirm = this.alertCtrl.create({
title: '是否放弃评价',
buttons: [
{
text: '否',
handler: () => {
//console.log('Disagree clicked');
}
},
{
text: '是',
handler: () => {
this.viewCtrl.dismiss();
}
}
]
});
confirm.present();
}
addWxPicture() {
var that = this;
wx.chooseImage({
count: that.max - that.len, // 默认9
sizeType: ['original', 'compressed'], // 可以指定是原图还是压缩图,默认二者都有
sourceType: ['album', 'camera'], // 可以指定来源是相册还是相机,默认二者都有
success: function (res) {
var localIds = res.localIds;
that.len = localIds.length; // 返回选定照片的本地ID列表,localId可以作为img标签的src属性显示图片
// for (var i = 0; i < localIds.length; i++) {
that.WxUpLoad(localIds);
// }
// wx.getLocalImgData({
// localId: localIds, // 图片的localID
// success: function (res) {
// var localData = res.localData;
// that.localIds = localData; // localData是图片的base64数据,可以用img标签显示
// }
// });
}
});
}
WxUpLoad(localIds: string[]) {
var that = this;
var localId = localIds.pop();
wx.uploadImage({
localId: localId,
success: function (res) {
var serverId = res.serverId;
that.serverIds.push(serverId);
//console.info("serverId》》》》》" + serverId)
if (localIds.length > 0) {
that.WxUpLoad(localIds);
} else {
// setTimeout(() => {
// that.input.setFocus();
// }, 500);
}
that.WxDownLoadImage(serverId);
// if (that.len == that.serverIds.length) {
// that.WxDownLoadImage(serverId);
// }
}
});
}
viewerWxPicture(item) {
alert(item);
var that = this;
wx.previewImage({
current: item, // 当前显示图片的http链接
urls: that.localIds // 需要预览的图片http链接列表
});
}
deleteWxPicture(i) {//删除照片
if (!this.allowDelete) {
return;
}
this.alertCtrl.create({
title: '确认删除?',
buttons: [{ text: '取消' },
{
text: '确定',
handler: () => {
// this.imgUrl.splice(i, 1);
// this.WimgUrl.splice(i, 1);
this.fileObjList.splice(i, 1);
// this.len--;
}
}
]
}).present();
}
WxDownLoadImage(serverId) {
this.nativeService.uploadWxFile(serverId).subscribe(res => {
var result = JSON.parse(res.result);
if (result.Code == "1") {
this.WimgUrl.push(fileViewPath + result.Msg);
this.imgUrl.push(result.Msg);
//console.info("成功" + this.WimgUrl)
}
//console.info("不成功" + result)
});
}
getImg() {
if (!this.nativeService.isMobile()) {
this.addWxPicture();
}
}
onChange1(event: any) {
if(this.fileObjList.length<6){
}else{
this.nativeService.showToast("最多只能上传6张图片");
return
}
let files = event.target.files;
var file = files[0];
var size = files[0].size / 1024 > 1024 ? (~~(10 * files[0].size / 1024 / 1024)) / 10 + "MB" : ~~(files[0].size / 1024) + "KB";
this.nativeService.showLoading("获取中");
//console.info("大小" + size)
//console.info(file);
if (file == undefined) {
this.IDUrl = "";
//console.info("undefiend1")
} else {
//console.info("undefiend2")
let fileReader = new FileReader();
fileReader.readAsDataURL(file);
var that = this;
fileReader.onload = function (e) {
var name: any;
name = e.target
that.IDUrl = name.result;
//console.info(that.IDUrl);
that.fileObjList.push(that.IDUrl);
// that.takeimg(name.result, 1);
}
}
this.nativeService.hideLoading();
}
takeimg(result, num) {
var result = result;
var that = this;
var maxSize = 100 * | {
} | identifier_body |
evaluate.ts | private max = 6;
private localIds = [];
private serverIds = [];
private isweb = false;
private allowDelete: boolean = true;
private len = 0;
private imgUrl = [];
private WimgUrl = [];
constructor(public navCtrl: NavController,
public navParams: NavParams,
private httpService: HttpService,
private nativeService: NativeService,
private storage: Storage,
private viewCtrl: ViewController,
public alertCtrl: AlertController, ) {
this.goodslist = navParams.get("goodsDetail");
this.typenum = navParams.get("num");
this.goodsDetail = this.goodslist.Items[this.typenum];
this.storage.get("userinfo").then(val => {
// //console.info(val);
if (val != "") {
if (val.userinform.ID == "") {
this.navCtrl.push("LoginPage", { showflag: true });
} else {
this.userID = val.userinform.ID;
}
} else {
this.navCtrl.push("LoginPage", { showflag: true });
}
});
this.isweb = !this.nativeService.isMobile();
}
ionViewDidLoad() {
}
givestaar(i) {
this.list = [{ "statue": false }, { "statue": false }, { "statue": false }, { "statue": false }, { "statue": false },];
for (var n = 0; n < (i + 1); n++) {
this.list[n].statue = true;
};
this.num = (i + 1)
if (i == 0) {
this.status = "非常差";
} else if (i == 1) {
this.status = "差";
} else if (i == 2) {
this.status = "一般";
} else if (i == 3) {
this.status = "好";
} else if (i == 4) {
this.status = "非常好";
};
////console.info(i);
}
comment() {
if (this.textarea == '') {
this.nativeService.showToast("评论内容不能为空!");
return;
} else {
if (!this.isweb) {
if (this.fileObjList == null || this.fileObjList.length <= 0) {
this.login([]);
} else {
this.login(this.fileObjList);
// this.HaveImage = 1;
// var filearray: any[] = [];
// for (var i = 0; i < this.fileObjList.length; i++) {
// filearray.push(this.fileObjList[i].origPath);
// }
// this.nativeService.showLoading("提交中");
// this.nativeService.uploadFileByMutiTranser(filearray).then(val => {
// this.login(val);
// });
}
} else {
if (this.imgUrl == null || this.imgUrl.length <= 0) {
this.login([]);
} else {
this.login(this.imgUrl);
}
}
}
}
login(filearray: any[]) {
//console.info(this.fileObjList[0])
var userInfo = {
Userid: this.userID,
goodsID: this.goodsDetail.ProductID,
Detail: this.textarea,
anonymity: 0,
Degree: this.num,
orderID: this.goodslist.ID,
file: this.fileObjList,
HaveImage: this.HaveImage,
itemID: this.goodsDetail.itemID
};
//console.info(userInfo);
var parameter = new Parameter();
parameter.jyh = "1024";
parameter.parm = JSON.stringify(userInfo);
this.nativeService.showLoading("提交中");
this.httpService.post("/MobService/Index", parameter).map(res => res.json()).subscribe(res => {
this.nativeService.hideLoading();
var result = JSON.parse(res.result);
//console.info(result);
if (result.Code == 1) {
this.nativeService.showToast(result.Msg);
this.goodslist.Items[this.typenum].IsEvaluate = 1
this.viewCtrl.dismiss(this.goodslist);
} else {
this.nativeService.showToast(result.Msg);
}
});
}
slice(string) {
if (string != "" && string != null) {
return string.slice(0, 18) + "...";
}
}
closealert() {
let confirm = this.alertCtrl.create({
title: '是否放弃评价',
buttons: [
{
text: '否',
handler: () => {
//console.log('Disagree clicked');
}
},
{
text: '是',
handler: () => {
this.viewCtrl.dismiss();
}
}
]
});
confirm.present();
}
addWxPicture() {
var that = this;
wx.chooseImage({
count: that.max - that.len, // 默认9
sizeType: ['original', 'compressed'], // 可以指定是原图还是压缩图,默认二者都有 | // for (var i = 0; i < localIds.length; i++) {
that.WxUpLoad(localIds);
// }
// wx.getLocalImgData({
// localId: localIds, // 图片的localID
// success: function (res) {
// var localData = res.localData;
// that.localIds = localData; // localData是图片的base64数据,可以用img标签显示
// }
// });
}
});
}
WxUpLoad(localIds: string[]) {
var that = this;
var localId = localIds.pop();
wx.uploadImage({
localId: localId,
success: function (res) {
var serverId = res.serverId;
that.serverIds.push(serverId);
//console.info("serverId》》》》》" + serverId)
if (localIds.length > 0) {
that.WxUpLoad(localIds);
} else {
// setTimeout(() => {
// that.input.setFocus();
// }, 500);
}
that.WxDownLoadImage(serverId);
// if (that.len == that.serverIds.length) {
// that.WxDownLoadImage(serverId);
// }
}
});
}
viewerWxPicture(item) {
alert(item);
var that = this;
wx.previewImage({
current: item, // 当前显示图片的http链接
urls: that.localIds // 需要预览的图片http链接列表
});
}
deleteWxPicture(i) {//删除照片
if (!this.allowDelete) {
return;
}
this.alertCtrl.create({
title: '确认删除?',
buttons: [{ text: '取消' },
{
text: '确定',
handler: () => {
// this.imgUrl.splice(i, 1);
// this.WimgUrl.splice(i, 1);
this.fileObjList.splice(i, 1);
// this.len--;
}
}
]
}).present();
}
WxDownLoadImage(serverId) {
this.nativeService.uploadWxFile(serverId).subscribe(res => {
var result = JSON.parse(res.result);
if (result.Code == "1") {
this.WimgUrl.push(fileViewPath + result.Msg);
this.imgUrl.push(result.Msg);
//console.info("成功" + this.WimgUrl)
}
//console.info("不成功" + result)
});
}
getImg() {
if (!this.nativeService.isMobile()) {
this.addWxPicture();
}
}
onChange1(event: any) {
if(this.fileObjList.length<6){
}else{
this.nativeService.showToast("最多只能上传6张图片");
return
}
let files = event.target.files;
var file = files[0];
var size = files[0].size / 1024 > 1024 ? (~~(10 * files[0].size / 1024 / 1024)) / 10 + "MB" : ~~(files[0].size / 1024) + "KB";
this.nativeService.showLoading("获取中");
//console.info("大小" + size)
//console.info(file);
if (file == undefined) {
this.IDUrl = "";
//console.info("undefiend1")
} else {
//console.info("undefiend2")
let fileReader = new FileReader();
fileReader.readAsDataURL(file);
var that = this;
fileReader.onload = function (e) {
var name: any;
name = e.target
that.IDUrl = name.result;
//console.info(that.IDUrl);
that.fileObjList.push(that.IDUrl);
// that.takeimg(name.result, 1);
}
}
this.nativeService.hideLoading();
}
takeimg(result, num) {
var result = result;
var that = this;
var maxSize = 100 * 102 | sourceType: ['album', 'camera'], // 可以指定来源是相册还是相机,默认二者都有
success: function (res) {
var localIds = res.localIds;
that.len = localIds.length; // 返回选定照片的本地ID列表,localId可以作为img标签的src属性显示图片 | random_line_split |
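Editorial aside, not part of the evaluate.ts records: WxUpLoad above uploads one localId and recurses from the success callback. This is a hedged, generic sketch of that pattern with the single-upload step injected as a callback; uploadOne stands in for wx.uploadImage and is an assumption, not part of the original code.

// Sketch only: sequential uploads via recursion, as WxUpLoad does.
function uploadSequentially(
    localIds: string[],
    uploadOne: (id: string, done: (serverId: string) => void) => void,
    onAll: (serverIds: string[]) => void,
    collected: string[] = []
): void {
    const next = localIds.pop(); // consume one id, mirroring WxUpLoad
    if (next === undefined) {
        onAll(collected); // nothing left to upload
        return;
    }
    uploadOne(next, serverId => {
        collected.push(serverId);
        uploadSequentially(localIds, uploadOne, onAll, collected);
    });
}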
evaluate.ts | max = 6;
private localIds = [];
private serverIds = [];
private isweb = false;
private allowDelete: boolean = true;
private len = 0;
private imgUrl = [];
private WimgUrl = [];
constructor(public navCtrl: NavController,
public navParams: NavParams,
private httpService: HttpService,
private nativeService: NativeService,
private storage: Storage,
private viewCtrl: ViewController,
public alertCtrl: AlertController, ) {
this.goodslist = navParams.get("goodsDetail");
this.typenum = navParams.get("num");
this.goodsDetail = this.goodslist.Items[this.typenum];
this.storage.get("userinfo").then(val => {
// //console.info(val);
if (val != "") {
if (val.userinform.ID == "") {
this.navCtrl.push("LoginPage", { showflag: true });
} else {
this.userID = val.userinform.ID;
}
} else {
this.navCtrl.push("LoginPage", { showflag: true });
}
});
this.isweb = !this.nativeService.isMobile();
}
ionViewDidLoad() {
}
givestaar(i) {
this.list = [{ "statue": false }, { "statue": false }, { "statue": false }, { "statue": false }, { "statue": false },];
for (var n = 0; n < (i + 1); n++) {
this.list[n].statue = true;
};
this.num = (i + 1)
if (i == 0) {
this.status = "非常差";
} else if (i == 1) {
this.status = "差";
} else if (i == 2) {
this.status = "一般";
} else if (i == 3) {
this.status = "好";
} else if (i == 4) {
this.status = "非常好";
};
////console.info(i);
}
comment() {
if (this.textarea == '') {
this.nativeService.showToast("评论内容不能为空!");
return;
} else {
if (!this.isweb) {
if (this.fileObjList == null || this.fileObjList.length <= 0) {
this.login([]);
} else {
this.login(this.fileObjList);
// this.HaveImage = 1;
// var filearray: any[] = [];
// for (var i = 0; i < this.fileObjList.length; i++) {
// filearray.push(this.fileObjList[i].origPath);
// }
// this.nativeService.showLoading("提交中");
// this.nativeService.uploadFileByMutiTranser(filearray).then(val => {
// this.login(val);
// });
}
} else {
if (this.imgUrl == null || this.imgUrl.length <= 0) {
this.login([]);
} else { | }
}
}
}
login(filearray: any[]) {
//console.info(this.fileObjList[0])
var userInfo = {
Userid: this.userID,
goodsID: this.goodsDetail.ProductID,
Detail: this.textarea,
anonymity: 0,
Degree: this.num,
orderID: this.goodslist.ID,
file: this.fileObjList,
HaveImage: this.HaveImage,
itemID: this.goodsDetail.itemID
};
//console.info(userInfo);
var parameter = new Parameter();
parameter.jyh = "1024";
parameter.parm = JSON.stringify(userInfo);
this.nativeService.showLoading("提交中");
this.httpService.post("/MobService/Index", parameter).map(res => res.json()).subscribe(res => {
this.nativeService.hideLoading();
var result = JSON.parse(res.result);
//console.info(result);
if (result.Code == 1) {
this.nativeService.showToast(result.Msg);
this.goodslist.Items[this.typenum].IsEvaluate = 1
this.viewCtrl.dismiss(this.goodslist);
} else {
this.nativeService.showToast(result.Msg);
}
});
}
slice(string) {
if (string != "" && string != null) {
return string.slice(0, 18) + "...";
}
}
closealert() {
let confirm = this.alertCtrl.create({
title: '是否放弃评价',
buttons: [
{
text: '否',
handler: () => {
//console.log('Disagree clicked');
}
},
{
text: '是',
handler: () => {
this.viewCtrl.dismiss();
}
}
]
});
confirm.present();
}
addWxPicture() {
var that = this;
wx.chooseImage({
count: that.max - that.len, // 默认9
sizeType: ['original', 'compressed'], // 可以指定是原图还是压缩图,默认二者都有
sourceType: ['album', 'camera'], // 可以指定来源是相册还是相机,默认二者都有
success: function (res) {
var localIds = res.localIds;
that.len = localIds.length; // 返回选定照片的本地ID列表,localId可以作为img标签的src属性显示图片
// for (var i = 0; i < localIds.length; i++) {
that.WxUpLoad(localIds);
// }
// wx.getLocalImgData({
// localId: localIds, // 图片的localID
// success: function (res) {
// var localData = res.localData;
// that.localIds = localData; // localData是图片的base64数据,可以用img标签显示
// }
// });
}
});
}
WxUpLoad(localIds: string[]) {
var that = this;
var localId = localIds.pop();
wx.uploadImage({
localId: localId,
success: function (res) {
var serverId = res.serverId;
that.serverIds.push(serverId);
//console.info("serverId》》》》》" + serverId)
if (localIds.length > 0) {
that.WxUpLoad(localIds);
} else {
// setTimeout(() => {
// that.input.setFocus();
// }, 500);
}
that.WxDownLoadImage(serverId);
// if (that.len == that.serverIds.length) {
// that.WxDownLoadImage(serverId);
// }
}
});
}
viewerWxPicture(item) {
alert(item);
var that = this;
wx.previewImage({
current: item, // 当前显示图片的http链接
urls: that.localIds // 需要预览的图片http链接列表
});
}
deleteWxPicture(i) {//删除照片
if (!this.allowDelete) {
return;
}
this.alertCtrl.create({
title: '确认删除?',
buttons: [{ text: '取消' },
{
text: '确定',
handler: () => {
// this.imgUrl.splice(i, 1);
// this.WimgUrl.splice(i, 1);
this.fileObjList.splice(i, 1);
// this.len--;
}
}
]
}).present();
}
WxDownLoadImage(serverId) {
this.nativeService.uploadWxFile(serverId).subscribe(res => {
var result = JSON.parse(res.result);
if (result.Code == "1") {
this.WimgUrl.push(fileViewPath + result.Msg);
this.imgUrl.push(result.Msg);
//console.info("成功" + this.WimgUrl)
}
//console.info("不成功" + result)
});
}
getImg() {
if (!this.nativeService.isMobile()) {
this.addWxPicture();
}
}
onChange1(event: any) {
if(this.fileObjList.length<6){
}else{
this.nativeService.showToast("最多只能上传6张图片");
return
}
let files = event.target.files;
var file = files[0];
var size = files[0].size / 1024 > 1024 ? (~~(10 * files[0].size / 1024 / 1024)) / 10 + "MB" : ~~(files[0].size / 1024) + "KB";
this.nativeService.showLoading("获取中");
//console.info("大小" + size)
//console.info(file);
if (file == undefined) {
this.IDUrl = "";
//console.info("undefiend1")
} else {
//console.info("undefiend2")
let fileReader = new FileReader();
fileReader.readAsDataURL(file);
var that = this;
fileReader.onload = function (e) {
var name: any;
name = e.target
that.IDUrl = name.result;
//console.info(that.IDUrl);
that.fileObjList.push(that.IDUrl);
// that.takeimg(name.result, 1);
}
}
this.nativeService.hideLoading();
}
takeimg(result, num) {
var result = result;
var that = this;
var maxSize = 100 * |
this.login(this.imgUrl);
| conditional_block |
activity.py | ("activityobj", idx+1, c)
with open(dataset_json_path) as dataset_json_file:
self.json_data = json.load(dataset_json_file)
print("Elements in the json file:", str(len(self.json_data)))
for image_path, masks in self.json_data.items():
image = skimage.io.imread(image_path)
height, width = image.shape[:2]
self.add_image("activityobj",
image_id=image_path, # use file path as a unique image id
path=image_path,
width=width, height=height)
def load_mask(self, image_id, coco_offset=0):
"""Generate instance masks for an image.
Returns:
masks: A bool array of shape [height, width, instance count] with
one mask per instance.
class_ids: a 1D array of class IDs of the instance masks.
"""
info = self.image_info[image_id]
mask = np.zeros([info["height"], info["width"], len(self.json_data[info["id"]])], dtype=np.uint8)
lbls = np.zeros(len(self.json_data[info["id"]]), dtype=np.int32)
for idx, (mask_path, mask_info) in enumerate(self.json_data[info["id"]].items()):
mask_class = mask_info["class"]
mask[:,:,idx] = np.array(PIL.Image.open(mask_path), dtype=np.uint8)
lbls[idx] = common.activity_classes_names.index(mask_class) + 1 + coco_offset
        # Return mask, and array of class IDs of each instance (one class ID
        # per mask, offset past the COCO classes when coco_offset is nonzero).
return mask.astype(np.bool), lbls
class ExtendedCocoDataset(ActivityDataset):
def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None, class_names=None,
class_map=None, return_coco=False, auto_download=False):
"""Load a subset of the COCO dataset.
dataset_dir: The root directory of the COCO dataset.
subset: What to load (train, val, minival, valminusminival)
year: What dataset year to load (2014, 2017) as a string, not an integer
class_ids: If provided, only loads images that have the given classes.
        class_map: TODO: Not implemented yet. Supports mapping classes from
different datasets to the same class ID.
return_coco: If True, returns the COCO object.
auto_download: Automatically download and unzip MS-COCO images and annotations
"""
if auto_download is True:
self.auto_download(dataset_dir, subset, year)
coco = COCO("{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
if subset == "minival" or subset == "valminusminival":
subset = "val"
image_dir = "{}/{}{}".format(dataset_dir, subset, year)
# Select class_ids from class_names:
if class_names:
class_ids = sorted(coco.getCatIds(catNms=class_names))
# Load all classes or a subset?
if not class_ids:
# All classes
class_ids = sorted(coco.getCatIds())
# All images or a subset?
if class_ids:
image_ids = []
for id in class_ids:
imgs = [] # list of images to add to image_ids
# Select at most COCO_IMAGES_PER_OBJECT and select only the images
# that have at most COCO_MAX_NUM_MASK_PER_IMAGE masks inside them:
for imgid in list(coco.getImgIds(catIds=[id])):
if len(imgs) >= COCO_IMAGES_PER_OBJECT:
break
if len(coco.loadAnns(coco.getAnnIds(imgIds=[imgid], catIds=class_ids, iscrowd=None))) <= COCO_MAX_NUM_MASK_PER_IMAGE:
imgs.append(imgid)
image_ids.extend(imgs)
#image_ids.extend(list(coco.getImgIds(catIds=[id]))[:COCO_IMAGES_PER_OBJECT])
# Remove duplicates
image_ids = list(set(image_ids))
else:
# All images
image_ids = list(coco.imgs.keys())
# Add classes
for i in class_ids:
self.add_class("coco", i, coco.loadCats(i)[0]["name"])
# Add images
for i in image_ids:
#print(len(coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None))))
|
if return_coco:
return coco
def auto_download(self, dataDir, dataType, dataYear):
"""Download the COCO dataset/annotations if requested.
dataDir: The root directory of the COCO dataset.
dataType: What to load (train, val, minival, valminusminival)
dataYear: What dataset year to load (2014, 2017) as a string, not an integer
Note:
For 2014, use "train", "val", "minival", or "valminusminival"
For 2017, only "train" and "val" annotations are available
"""
# Setup paths and file names
if dataType == "minival" or dataType == "valminusminival":
imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format("val", dataYear)
else:
imgDir = "{}/{}{}".format(dataDir, dataType, dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, dataType, dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(dataType, dataYear)
# print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL)
# Create main folder if it doesn't exist yet
if not os.path.exists(dataDir):
os.makedirs(dataDir)
# Download images if not available locally
if not os.path.exists(imgDir):
os.makedirs(imgDir)
print("Downloading images to " + imgZipFile + " ...")
with urllib.request.urlopen(imgURL) as resp, open(imgZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + imgZipFile)
with zipfile.ZipFile(imgZipFile, "r") as zip_ref:
zip_ref.extractall(dataDir)
print("... done unzipping")
print("Will use images in " + imgDir)
# Setup annotations data paths
annDir = "{}/annotations".format(dataDir)
if dataType == "minival":
annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
annFile = "{}/instances_minival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
unZipDir = annDir
elif dataType == "valminusminival":
annZipFile = "{}/instances_valminusminival2014.json.zip".format(dataDir)
annFile = "{}/instances_valminusminival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
unZipDir = annDir
else:
annZipFile = "{}/annotations_trainval{}.zip".format(dataDir, dataYear)
annFile = "{}/instances_{}{}.json".format(annDir, dataType, dataYear)
annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(dataYear)
unZipDir = dataDir
# print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)
# Download annotations if not available locally
if not os.path.exists(annDir):
os.makedirs(annDir)
if not os.path.exists(annFile):
if not os.path.exists(annZipFile):
print("Downloading zipped annotations to " + annZipFile + " ...")
with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + annZipFile)
with zipfile.ZipFile(annZipFile, "r") as zip_ref:
zip_ref.extractall(unZipDir)
print("... done unzipping")
print("Will use annotations in " + annFile)
| self.add_image(
"coco", image_id=i,
path=os.path.join(image_dir, coco.imgs[i]['file_name']),
width=coco.imgs[i]["width"],
height=coco.imgs[i]["height"],
annotations=coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None))) | conditional_block |
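Editorial aside, not part of the activity.py records: one way the classes in this record could be wired together for training, assuming the standard Matterport mrcnn API (utils.Dataset.prepare, modellib.MaskRCNN, model.train) and the imports already shown in the record; paths, epoch count and the COCO class names are placeholders.

# Sketch only: combine COCO images with the custom activity dataset.
def train_combined_sketch(activity_json, coco_dir, log_dir):
    config = ExtendedCocoConfig()

    dataset_train = ExtendedCocoDataset()
    dataset_train.load_coco(coco_dir, "train", class_names=["person", "cup"])
    dataset_train.load_activity(activity_json)
    dataset_train.prepare()

    dataset_val = ExtendedCocoDataset()
    dataset_val.load_coco(coco_dir, "val", class_names=["person", "cup"])
    dataset_val.prepare()

    model = modellib.MaskRCNN(mode="training", config=config, model_dir=log_dir)
    model.train(dataset_train, dataset_val,
                learning_rate=config.LEARNING_RATE,
                epochs=10, layers="heads")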
activity.py | # Import Mask RCNN
#sys.path.append(ROOT_DIR) # To find local version of the library
sys.path.append("third_party/Mask_RCNN/") # To find local version of the library
from mrcnn.config import Config
from mrcnn import model as modellib, utils
import common
#from train import ActivityConfig, ActivityDataset
# Path to trained weights file
#COCO_MODEL_PATH = os.path.join(ROOT_DIR, "weights/mask_rcnn_coco.h5")
COCO_MODEL_PATH = "weights/mask_rcnn_coco.h5"
# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
#DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")
DEFAULT_LOGS_DIR = "logs"
DEFAULT_DATASET_YEAR = "2014"
COCO_IMAGES_PER_OBJECT = 40
COCO_MAX_NUM_MASK_PER_IMAGE = 3
############################################################
# Configurations
############################################################
class ActivityConfig(Config):
"""Configuration for training on the toy dataset.
Derives from the base Config class and overrides some values.
"""
# Give the configuration a recognizable name
NAME = "activityobj"
# We use a GPU with 12GB memory, which can fit two images.
# Adjust down if you use a smaller GPU.
IMAGES_PER_GPU = 2
# Number of classes (including background)
#NUM_CLASSES = 1 + 1 # Background + balloon
NUM_CLASSES = 1 + common.ACTIVITY_NUM_CLASSES
# Number of training steps per epoch
STEPS_PER_EPOCH = 20
# Skip detections with < 90% confidence
DETECTION_MIN_CONFIDENCE = 0.9
class ExtendedCocoConfig(Config):
"""Configuration for training on MS COCO.
Derives from the base Config class and overrides values specific
to the COCO dataset.
"""
# Give the configuration a recognizable name
NAME = "coco"
# We use a GPU with 12GB memory, which can fit two images.
# Adjust down if you use a smaller GPU.
IMAGES_PER_GPU = 2
# Uncomment to train on 8 GPUs (default is 1)
# GPU_COUNT = 8
# Number of classes (including background)
NUM_CLASSES = 1 + common.COCO_NUM_CLASSES + common.ACTIVITY_NUM_CLASSES
# Number of training steps per epoch
STEPS_PER_EPOCH = 30
class ActivityInferenceConfig(ActivityConfig):
# Set batch size to 1 since we'll be running inference on
# one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
GPU_COUNT = 1
IMAGES_PER_GPU = 1
DETECTION_MIN_CONFIDENCE = 0
class ExtendedInferenceConfig(ExtendedCocoConfig):
# Set batch size to 1 since we'll be running inference on
# one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
GPU_COUNT = 1
IMAGES_PER_GPU = 1
DETECTION_MIN_CONFIDENCE = 0
############################################################
# Dataset
############################################################
class ActivityDataset(utils.Dataset):
def load_activity(self, dataset_json_path):
for idx, c in enumerate(common.activity_classes_names):
self.add_class("activityobj", idx+1, c)
with open(dataset_json_path) as dataset_json_file:
self.json_data = json.load(dataset_json_file)
print("Elements in the json file:", str(len(self.json_data)))
for image_path, masks in self.json_data.items():
image = skimage.io.imread(image_path)
height, width = image.shape[:2]
self.add_image("activityobj",
image_id=image_path, # use file path as a unique image id
path=image_path,
width=width, height=height)
def load_mask(self, image_id, coco_offset=0):
"""Generate instance masks for an image.
Returns:
masks: A bool array of shape [height, width, instance count] with
one mask per instance.
class_ids: a 1D array of class IDs of the instance masks.
"""
info = self.image_info[image_id]
mask = np.zeros([info["height"], info["width"], len(self.json_data[info["id"]])], dtype=np.uint8)
lbls = np.zeros(len(self.json_data[info["id"]]), dtype=np.int32)
for idx, (mask_path, mask_info) in enumerate(self.json_data[info["id"]].items()):
mask_class = mask_info["class"]
mask[:,:,idx] = np.array(PIL.Image.open(mask_path), dtype=np.uint8)
lbls[idx] = common.activity_classes_names.index(mask_class) + 1 + coco_offset
        # Return mask, and array of class IDs of each instance (one class ID
        # per mask, offset past the COCO classes when coco_offset is nonzero).
return mask.astype(np.bool), lbls
class ExtendedCocoDataset(ActivityDataset):
def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None, class_names=None,
class_map=None, return_coco=False, auto_download=False):
"""Load a subset of the COCO dataset.
dataset_dir: The root directory of the COCO dataset.
subset: What to load (train, val, minival, valminusminival)
year: What dataset year to load (2014, 2017) as a string, not an integer
class_ids: If provided, only loads images that have the given classes.
        class_map: TODO: Not implemented yet. Supports mapping classes from
different datasets to the same class ID.
return_coco: If True, returns the COCO object.
auto_download: Automatically download and unzip MS-COCO images and annotations
"""
if auto_download is True:
self.auto_download(dataset_dir, subset, year)
coco = COCO("{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
if subset == "minival" or subset == "valminusminival":
subset = "val"
image_dir = "{}/{}{}".format(dataset_dir, subset, year)
# Select class_ids from class_names:
if class_names:
class_ids = sorted(coco.getCatIds(catNms=class_names))
# Load all classes or a subset?
if not class_ids:
# All classes
class_ids = sorted(coco.getCatIds())
# All images or a subset?
if class_ids:
image_ids = []
for id in class_ids:
imgs = [] # list of images to add to image_ids
# Select at most COCO_IMAGES_PER_OBJECT and select only the images
# that have at most COCO_MAX_NUM_MASK_PER_IMAGE masks inside them:
for imgid in list(coco.getImgIds(catIds=[id])):
if len(imgs) >= COCO_IMAGES_PER_OBJECT:
break
if len(coco.loadAnns(coco.getAnnIds(imgIds=[imgid], catIds=class_ids, iscrowd=None))) <= COCO_MAX_NUM_MASK_PER_IMAGE:
imgs.append(imgid)
image_ids.extend(imgs)
#image_ids.extend(list(coco.getImgIds(catIds=[id]))[:COCO_IMAGES_PER_OBJECT])
# Remove duplicates
image_ids = list(set(image_ids))
else:
# All images
image_ids = list(coco.imgs.keys())
# Add classes
for i in class_ids:
self.add_class("coco", i, coco.loadCats(i)[0]["name"])
# Add images
for i in image_ids:
#print(len(coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None))))
self.add_image(
"coco", image_id=i,
path=os.path.join(image_dir, coco.imgs[i]['file_name']),
width=coco.imgs[i]["width"],
height=coco.imgs[i]["height"],
annotations=coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None)))
if return_coco:
return coco
def auto_download(self, dataDir, dataType, dataYear):
"""Download the COCO dataset/annotations if requested.
dataDir: The root directory of the COCO dataset.
dataType: What to load (train, val, minival, valminusminival)
dataYear: What dataset year to load (2014, 2017) as a string, not an integer
Note:
For 2014, use "train", "val", "minival", or "valminusminival"
For 2017, only "train" and "val" annotations are available
"""
# Setup paths and file names
if dataType == "minival" or dataType == "valminusminival":
imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format("val", dataYear)
else:
imgDir = "{}/{}{}".format(dataDir, dataType | random_line_split |
||
activity.py | ["width"], len(self.json_data[info["id"]])], dtype=np.uint8)
lbls = np.zeros(len(self.json_data[info["id"]]), dtype=np.int32)
for idx, (mask_path, mask_info) in enumerate(self.json_data[info["id"]].items()):
mask_class = mask_info["class"]
mask[:,:,idx] = np.array(PIL.Image.open(mask_path), dtype=np.uint8)
lbls[idx] = common.activity_classes_names.index(mask_class) + 1 + coco_offset
        # Return mask, and array of class IDs of each instance (one class ID
        # per mask, offset past the COCO classes when coco_offset is nonzero).
return mask.astype(np.bool), lbls
class ExtendedCocoDataset(ActivityDataset):
def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None, class_names=None,
class_map=None, return_coco=False, auto_download=False):
"""Load a subset of the COCO dataset.
dataset_dir: The root directory of the COCO dataset.
subset: What to load (train, val, minival, valminusminival)
year: What dataset year to load (2014, 2017) as a string, not an integer
class_ids: If provided, only loads images that have the given classes.
        class_map: TODO: Not implemented yet. Supports mapping classes from
different datasets to the same class ID.
return_coco: If True, returns the COCO object.
auto_download: Automatically download and unzip MS-COCO images and annotations
"""
if auto_download is True:
self.auto_download(dataset_dir, subset, year)
coco = COCO("{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
if subset == "minival" or subset == "valminusminival":
subset = "val"
image_dir = "{}/{}{}".format(dataset_dir, subset, year)
# Select class_ids from class_names:
if class_names:
class_ids = sorted(coco.getCatIds(catNms=class_names))
# Load all classes or a subset?
if not class_ids:
# All classes
class_ids = sorted(coco.getCatIds())
# All images or a subset?
if class_ids:
image_ids = []
for id in class_ids:
imgs = [] # list of images to add to image_ids
# Select at most COCO_IMAGES_PER_OBJECT and select only the images
# that have at most COCO_MAX_NUM_MASK_PER_IMAGE masks inside them:
for imgid in list(coco.getImgIds(catIds=[id])):
if len(imgs) >= COCO_IMAGES_PER_OBJECT:
break
if len(coco.loadAnns(coco.getAnnIds(imgIds=[imgid], catIds=class_ids, iscrowd=None))) <= COCO_MAX_NUM_MASK_PER_IMAGE:
imgs.append(imgid)
image_ids.extend(imgs)
#image_ids.extend(list(coco.getImgIds(catIds=[id]))[:COCO_IMAGES_PER_OBJECT])
# Remove duplicates
image_ids = list(set(image_ids))
else:
# All images
image_ids = list(coco.imgs.keys())
# Add classes
for i in class_ids:
self.add_class("coco", i, coco.loadCats(i)[0]["name"])
# Add images
for i in image_ids:
#print(len(coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None))))
self.add_image(
"coco", image_id=i,
path=os.path.join(image_dir, coco.imgs[i]['file_name']),
width=coco.imgs[i]["width"],
height=coco.imgs[i]["height"],
annotations=coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None)))
if return_coco:
return coco
def auto_download(self, dataDir, dataType, dataYear):
"""Download the COCO dataset/annotations if requested.
dataDir: The root directory of the COCO dataset.
dataType: What to load (train, val, minival, valminusminival)
dataYear: What dataset year to load (2014, 2017) as a string, not an integer
Note:
For 2014, use "train", "val", "minival", or "valminusminival"
For 2017, only "train" and "val" annotations are available
"""
# Setup paths and file names
if dataType == "minival" or dataType == "valminusminival":
imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format("val", dataYear)
else:
imgDir = "{}/{}{}".format(dataDir, dataType, dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, dataType, dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(dataType, dataYear)
# print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL)
# Create main folder if it doesn't exist yet
if not os.path.exists(dataDir):
os.makedirs(dataDir)
# Download images if not available locally
if not os.path.exists(imgDir):
os.makedirs(imgDir)
print("Downloading images to " + imgZipFile + " ...")
with urllib.request.urlopen(imgURL) as resp, open(imgZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + imgZipFile)
with zipfile.ZipFile(imgZipFile, "r") as zip_ref:
zip_ref.extractall(dataDir)
print("... done unzipping")
print("Will use images in " + imgDir)
# Setup annotations data paths
annDir = "{}/annotations".format(dataDir)
if dataType == "minival":
annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
annFile = "{}/instances_minival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
unZipDir = annDir
elif dataType == "valminusminival":
annZipFile = "{}/instances_valminusminival2014.json.zip".format(dataDir)
annFile = "{}/instances_valminusminival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
unZipDir = annDir
else:
annZipFile = "{}/annotations_trainval{}.zip".format(dataDir, dataYear)
annFile = "{}/instances_{}{}.json".format(annDir, dataType, dataYear)
annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(dataYear)
unZipDir = dataDir
# print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)
# Download annotations if not available locally
if not os.path.exists(annDir):
os.makedirs(annDir)
if not os.path.exists(annFile):
if not os.path.exists(annZipFile):
print("Downloading zipped annotations to " + annZipFile + " ...")
with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + annZipFile)
with zipfile.ZipFile(annZipFile, "r") as zip_ref:
zip_ref.extractall(unZipDir)
print("... done unzipping")
print("Will use annotations in " + annFile)
def load_mask(self, image_id):
| """Load instance masks for the given image.
Different datasets use different ways to store masks. This
function converts the different mask format to one format
in the form of a bitmap [height, width, instances].
Returns:
masks: A bool array of shape [height, width, instance count] with
one mask per instance.
class_ids: a 1D array of class IDs of the instance masks.
"""
# If not a COCO image, delegate to parent class.
image_info = self.image_info[image_id]
if image_info["source"] != "coco":
return super(ExtendedCocoDataset, self).load_mask(image_id, common.COCO_NUM_CLASSES) # NOTE: this calls ActivityDataset.load_mask()
instance_masks = []
class_ids = []
annotations = self.image_info[image_id]["annotations"]
# Build mask of shape [height, width, instance_count] and list | identifier_body |
|
activity.py | ("activityobj", idx+1, c)
with open(dataset_json_path) as dataset_json_file:
self.json_data = json.load(dataset_json_file)
print("Elements in the json file:", str(len(self.json_data)))
for image_path, masks in self.json_data.items():
image = skimage.io.imread(image_path)
height, width = image.shape[:2]
self.add_image("activityobj",
image_id=image_path, # use file path as a unique image id
path=image_path,
width=width, height=height)
def load_mask(self, image_id, coco_offset=0):
"""Generate instance masks for an image.
Returns:
masks: A bool array of shape [height, width, instance count] with
one mask per instance.
class_ids: a 1D array of class IDs of the instance masks.
"""
info = self.image_info[image_id]
mask = np.zeros([info["height"], info["width"], len(self.json_data[info["id"]])], dtype=np.uint8)
lbls = np.zeros(len(self.json_data[info["id"]]), dtype=np.int32)
for idx, (mask_path, mask_info) in enumerate(self.json_data[info["id"]].items()):
mask_class = mask_info["class"]
mask[:,:,idx] = np.array(PIL.Image.open(mask_path), dtype=np.uint8)
lbls[idx] = common.activity_classes_names.index(mask_class) + 1 + coco_offset
        # Return mask, and array of class IDs of each instance (one class ID
        # per mask, offset past the COCO classes when coco_offset is nonzero).
return mask.astype(np.bool), lbls
class ExtendedCocoDataset(ActivityDataset):
def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None, class_names=None,
class_map=None, return_coco=False, auto_download=False):
"""Load a subset of the COCO dataset.
dataset_dir: The root directory of the COCO dataset.
subset: What to load (train, val, minival, valminusminival)
year: What dataset year to load (2014, 2017) as a string, not an integer
class_ids: If provided, only loads images that have the given classes.
        class_map: TODO: Not implemented yet. Supports mapping classes from
different datasets to the same class ID.
return_coco: If True, returns the COCO object.
auto_download: Automatically download and unzip MS-COCO images and annotations
"""
if auto_download is True:
self.auto_download(dataset_dir, subset, year)
coco = COCO("{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
if subset == "minival" or subset == "valminusminival":
subset = "val"
image_dir = "{}/{}{}".format(dataset_dir, subset, year)
# Select class_ids from class_names:
if class_names:
class_ids = sorted(coco.getCatIds(catNms=class_names))
# Load all classes or a subset?
if not class_ids:
# All classes
class_ids = sorted(coco.getCatIds())
# All images or a subset?
if class_ids:
image_ids = []
for id in class_ids:
imgs = [] # list of images to add to image_ids
# Select at most COCO_IMAGES_PER_OBJECT and select only the images
# that have at most COCO_MAX_NUM_MASK_PER_IMAGE masks inside them:
for imgid in list(coco.getImgIds(catIds=[id])):
if len(imgs) >= COCO_IMAGES_PER_OBJECT:
break
if len(coco.loadAnns(coco.getAnnIds(imgIds=[imgid], catIds=class_ids, iscrowd=None))) <= COCO_MAX_NUM_MASK_PER_IMAGE:
imgs.append(imgid)
image_ids.extend(imgs)
#image_ids.extend(list(coco.getImgIds(catIds=[id]))[:COCO_IMAGES_PER_OBJECT])
# Remove duplicates
image_ids = list(set(image_ids))
else:
# All images
image_ids = list(coco.imgs.keys())
# Add classes
for i in class_ids:
self.add_class("coco", i, coco.loadCats(i)[0]["name"])
# Add images
for i in image_ids:
#print(len(coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None))))
self.add_image(
"coco", image_id=i,
path=os.path.join(image_dir, coco.imgs[i]['file_name']),
width=coco.imgs[i]["width"],
height=coco.imgs[i]["height"],
annotations=coco.loadAnns(coco.getAnnIds(imgIds=[i], catIds=class_ids, iscrowd=None)))
if return_coco:
return coco
def | (self, dataDir, dataType, dataYear):
"""Download the COCO dataset/annotations if requested.
dataDir: The root directory of the COCO dataset.
dataType: What to load (train, val, minival, valminusminival)
dataYear: What dataset year to load (2014, 2017) as a string, not an integer
Note:
For 2014, use "train", "val", "minival", or "valminusminival"
For 2017, only "train" and "val" annotations are available
"""
# Setup paths and file names
if dataType == "minival" or dataType == "valminusminival":
imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format("val", dataYear)
else:
imgDir = "{}/{}{}".format(dataDir, dataType, dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, dataType, dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(dataType, dataYear)
# print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL)
# Create main folder if it doesn't exist yet
if not os.path.exists(dataDir):
os.makedirs(dataDir)
# Download images if not available locally
if not os.path.exists(imgDir):
os.makedirs(imgDir)
print("Downloading images to " + imgZipFile + " ...")
with urllib.request.urlopen(imgURL) as resp, open(imgZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + imgZipFile)
with zipfile.ZipFile(imgZipFile, "r") as zip_ref:
zip_ref.extractall(dataDir)
print("... done unzipping")
print("Will use images in " + imgDir)
# Setup annotations data paths
annDir = "{}/annotations".format(dataDir)
if dataType == "minival":
annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
annFile = "{}/instances_minival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
unZipDir = annDir
elif dataType == "valminusminival":
annZipFile = "{}/instances_valminusminival2014.json.zip".format(dataDir)
annFile = "{}/instances_valminusminival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
unZipDir = annDir
else:
annZipFile = "{}/annotations_trainval{}.zip".format(dataDir, dataYear)
annFile = "{}/instances_{}{}.json".format(annDir, dataType, dataYear)
annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(dataYear)
unZipDir = dataDir
# print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)
# Download annotations if not available locally
if not os.path.exists(annDir):
os.makedirs(annDir)
if not os.path.exists(annFile):
if not os.path.exists(annZipFile):
print("Downloading zipped annotations to " + annZipFile + " ...")
with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + annZipFile)
with zipfile.ZipFile(annZipFile, "r") as zip_ref:
zip_ref.extractall(unZipDir)
print("... done unzipping")
print("Will use annotations in " + annFile)
| auto_download | identifier_name |
net.rs | /// session: Session::load_or_create("hello-world.session")?,
/// api_id: API_ID,
/// api_hash: API_HASH.to_string(),
/// params: Default::default(),
/// }).await?;
/// # Ok(())
/// # }
/// ```
pub async fn connect(mut config: Config) -> Result<Self, AuthorizationError> {
let sender = connect_sender(config.session.user_dc.unwrap_or(0), &mut config).await?;
// TODO Sender doesn't have a way to handle backpressure yet
let (handle_tx, handle_rx) = mpsc::unbounded_channel();
Ok(Self {
sender,
config,
handle_tx,
handle_rx,
})
}
/// Invoke a raw API call without the need to use a [`Client::handle`] or having to repeatedly
/// call [`Client::step`]. This directly sends the request to Telegram's servers.
///
/// Using function definitions corresponding to a different layer is likely to cause the
/// responses to the request to not be understood.
///
/// <div class="stab unstable">
///
/// **Warning**: this method is **not** part of the stability guarantees of semantic
/// versioning. It **may** break during *minor* version changes (but not on patch version
/// changes). Use with care.
///
/// </div>
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_tl_types as tl;
///
/// dbg!(client.invoke(&tl::functions::Ping { ping_id: 0 }).await?);
/// # Ok(())
/// # }
/// ```
pub async fn invoke<R: tl::RemoteCall>(
&mut self,
request: &R,
) -> Result<R::Return, InvocationError> {
self.sender.invoke(request).await
}
/// Return a new [`ClientHandle`] that can be used to invoke remote procedure calls.
///
/// # Examples
///
/// ```
/// use tokio::task;
///
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Obtain a handle. After this you can obtain more by using `client_handle.clone()`.
/// let mut client_handle = client.handle();
///
/// // Run the network loop. This is necessary, or no network events will be processed!
/// let network_handle = task::spawn(async move { client.run_until_disconnected().await });
///
/// // Use the `client_handle` to your heart's content, maybe you just want to disconnect:
/// client_handle.disconnect().await;
///
/// // Joining on the spawned task lets us access the result from `run_until_disconnected`,
/// // so we can verify everything went fine. You could also just drop this though.
/// network_handle.await?;
/// # Ok(())
/// # }
/// ```
pub fn handle(&self) -> ClientHandle {
ClientHandle {
tx: self.handle_tx.clone(),
}
}
/// Perform a single network step or processing of incoming requests via handles.
///
/// If a server message is received, requests enqueued via the [`ClientHandle`]s may have
/// their result delivered via a channel, and a (possibly empty) list of updates will be
/// returned.
///
/// The other return values are graceful disconnection, or a read error.
///
/// Most commonly, you will want to use the higher-level abstraction [`Client::next_updates`]
/// instead.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_client::NetworkStep;
///
/// loop {
/// // Process network events forever until we gracefully disconnect or get an error.
/// match client.step().await? {
/// NetworkStep::Connected { .. } => continue,
/// NetworkStep::Disconnected => break,
/// }
/// }
/// # Ok(())
/// # }
/// ```
pub async fn step(&mut self) -> Result<Step, sender::ReadError> {
let (network, request) = {
let network = self.sender.step();
let request = self.handle_rx.recv();
pin_mut!(network);
pin_mut!(request);
match future::select(network, request).await {
future::Either::Left((network, request)) => {
let request = request.now_or_never();
(Some(network), request)
}
future::Either::Right((request, network)) => {
let network = network.now_or_never();
(network, Some(request))
}
}
};
if let Some(request) = request {
let request = request.expect("mpsc returned None");
match request {
Request::Rpc { request, response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(self.sender.enqueue_body(request)));
}
Request::Disconnect { response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(()));
return Ok(Step::Disconnected);
}
}
}
// TODO request cancellation if this is Err
// (perhaps a method on the sender to cancel_all)
Ok(Step::Connected {
updates: if let Some(updates) = network {
updates?
} else {
Vec::new()
},
})
}
/// Run the client by repeatedly calling [`Client::step`] until a graceful disconnection
/// occurs, or a network error occurs. Incoming updates are ignored and simply dropped.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// client.run_until_disconnected().await?;
/// # Ok(())
/// # }
/// ```
pub async fn run_until_disconnected(mut self) -> Result<(), sender::ReadError> {
loop {
match self.step().await? {
Step::Connected { .. } => continue,
Step::Disconnected => break Ok(()),
}
}
}
}
/// Method implementations directly related with network connectivity.
impl ClientHandle {
/// Invoke a raw API call.
///
/// Using function definitions corresponding to a different layer is likely to cause the
/// responses to the request to not be understood.
///
/// <div class="stab unstable">
///
/// **Warning**: this method is **not** part of the stability guarantees of semantic
/// versioning. It **may** break during *minor* version changes (but not on patch version
/// changes). Use with care.
///
/// </div>
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::ClientHandle) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_tl_types as tl;
///
/// dbg!(client.invoke(&tl::functions::Ping { ping_id: 0 }).await?);
/// # Ok(())
/// # }
/// ```
pub async fn invoke<R: tl::RemoteCall>(
&mut self,
request: &R,
) -> Result<R::Return, InvocationError> {
let (response, rx) = oneshot::channel();
// TODO add a test for this (using a handle with the client dropped)
if let Err(_) = self.tx.send(Request::Rpc {
request: request.to_bytes(),
response,
}) {
// `Client` was dropped, can no longer send requests
return Err(InvocationError::Dropped);
}
// First receive the `oneshot::Receiver` from the `Client`,
// then `await` on that to receive the response body for the request.
if let Ok(response) = rx.await {
if let Ok(result) = response.await {
match result {
Ok(body) => R::Return::from_bytes(&body).map_err(|e| e.into()),
Err(e) => Err(e),
}
} else {
// `Sender` dropped, won't be receiving a response for this
Err(InvocationError::Dropped)
}
} else {
// `Client` dropped, won't be receiving a response for this
Err(InvocationError::Dropped)
}
}
/// Gracefully tell the [`Client`] that created this handle to disconnect and stop receiving
/// things from the network.
///
/// If the client has already been dropped (and thus disconnected), this method does nothing.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::ClientHandle) {
/// client.disconnect().await;
/// # }
/// ```
pub async fn disconnect(&mut self) {
let (response, rx) = oneshot::channel();
if let Ok(_) = self.tx.send(Request::Disconnect { response }) {
// It's fine to drop errors here, it means the channel was dropped by the `Client`.
drop(rx.await);
} else | {
// `Client` is already dropped, no need to disconnect again.
} | conditional_block |
|
net.rs | longer knows about it)
// TODO all up-to-date server addresses should be stored in the session for future initial connections
let _remote_config = sender
.invoke(&tl::functions::InvokeWithLayer {
layer: tl::LAYER,
query: tl::functions::InitConnection {
api_id: config.api_id,
device_model: config.params.device_model.clone(),
system_version: config.params.system_version.clone(),
app_version: config.params.app_version.clone(),
system_lang_code: config.params.system_lang_code.clone(),
lang_pack: "".into(),
lang_code: config.params.lang_code.clone(),
proxy: None,
params: None,
query: tl::functions::help::GetConfig {},
},
})
.await?;
// TODO use the dc id from the config as "this dc", not the input dc id
config.session.user_dc = Some(dc_id);
config.session.save()?;
Ok(sender)
}
/// Method implementations directly related with network connectivity.
impl Client {
/// Creates and returns a new client instance upon successful connection to Telegram.
///
/// If the session in the configuration did not have an authorization key, a new one
/// will be created and the session will be saved with it.
///
/// The connection will be initialized with the data from the input configuration.
///
/// # Examples
///
/// ```
/// use grammers_client::{Client, Config};
/// use grammers_session::Session;
///
/// // Note: these are example values and are not actually valid.
/// // Obtain your own with the developer's phone at https://my.telegram.org.
/// const API_ID: i32 = 932939;
/// const API_HASH: &str = "514727c32270b9eb8cc16daf17e21e57";
///
/// # async fn f(mut client: Client) -> Result<(), Box<dyn std::error::Error>> {
/// let client = Client::connect(Config {
/// session: Session::load_or_create("hello-world.session")?,
/// api_id: API_ID,
/// api_hash: API_HASH.to_string(),
/// params: Default::default(),
/// }).await?;
/// # Ok(())
/// # }
/// ```
pub async fn connect(mut config: Config) -> Result<Self, AuthorizationError> {
let sender = connect_sender(config.session.user_dc.unwrap_or(0), &mut config).await?;
// TODO Sender doesn't have a way to handle backpressure yet
let (handle_tx, handle_rx) = mpsc::unbounded_channel();
Ok(Self {
sender,
config,
handle_tx,
handle_rx,
})
}
/// Invoke a raw API call without the need to use a [`Client::handle`] or having to repeatedly
/// call [`Client::step`]. This directly sends the request to Telegram's servers.
///
/// Using function definitions corresponding to a different layer is likely to cause the
/// responses to the request to not be understood.
///
/// <div class="stab unstable">
///
/// **Warning**: this method is **not** part of the stability guarantees of semantic
/// versioning. It **may** break during *minor* version changes (but not on patch version
/// changes). Use with care.
///
/// </div>
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_tl_types as tl;
///
/// dbg!(client.invoke(&tl::functions::Ping { ping_id: 0 }).await?);
/// # Ok(())
/// # }
/// ```
pub async fn invoke<R: tl::RemoteCall>(
&mut self,
request: &R,
) -> Result<R::Return, InvocationError> {
self.sender.invoke(request).await
}
/// Return a new [`ClientHandle`] that can be used to invoke remote procedure calls.
///
/// # Examples
///
/// ```
/// use tokio::task;
///
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Obtain a handle. After this you can obtain more by using `client_handle.clone()`.
/// let mut client_handle = client.handle();
///
/// // Run the network loop. This is necessary, or no network events will be processed!
/// let network_handle = task::spawn(async move { client.run_until_disconnected().await });
///
/// // Use the `client_handle` to your heart's content, maybe you just want to disconnect:
/// client_handle.disconnect().await;
///
/// // Joining on the spawned task lets us access the result from `run_until_disconnected`,
/// // so we can verify everything went fine. You could also just drop this though.
/// network_handle.await?;
/// # Ok(())
/// # }
/// ```
pub fn handle(&self) -> ClientHandle {
ClientHandle {
tx: self.handle_tx.clone(),
}
}
/// Perform a single network step or processing of incoming requests via handles.
///
/// If a server message is received, requests enqueued via the [`ClientHandle`]s may have
/// their result delivered via a channel, and a (possibly empty) list of updates will be
/// returned.
///
/// The other return values are graceful disconnection, or a read error.
///
/// Most commonly, you will want to use the higher-level abstraction [`Client::next_updates`]
/// instead.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_client::NetworkStep;
///
/// loop {
/// // Process network events forever until we gracefully disconnect or get an error.
/// match client.step().await? {
/// NetworkStep::Connected { .. } => continue,
/// NetworkStep::Disconnected => break,
/// }
/// }
/// # Ok(())
/// # }
/// ```
pub async fn step(&mut self) -> Result<Step, sender::ReadError> {
let (network, request) = {
let network = self.sender.step();
let request = self.handle_rx.recv();
pin_mut!(network);
pin_mut!(request);
match future::select(network, request).await {
future::Either::Left((network, request)) => {
let request = request.now_or_never();
(Some(network), request)
}
future::Either::Right((request, network)) => {
let network = network.now_or_never();
(network, Some(request))
}
}
};
if let Some(request) = request {
let request = request.expect("mpsc returned None");
match request {
Request::Rpc { request, response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(self.sender.enqueue_body(request)));
}
Request::Disconnect { response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(()));
return Ok(Step::Disconnected);
}
}
}
// TODO request cancellation if this is Err
// (perhaps a method on the sender to cancel_all)
Ok(Step::Connected {
updates: if let Some(updates) = network {
updates?
} else {
Vec::new()
},
})
}
/// Run the client by repeatedly calling [`Client::step`] until a graceful disconnection
/// occurs, or a network error occurs. Incoming updates are ignored and simply dropped.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// client.run_until_disconnected().await?;
/// # Ok(())
/// # }
/// ```
pub async fn run_until_disconnected(mut self) -> Result<(), sender::ReadError> {
loop {
match self.step().await? {
Step::Connected { .. } => continue,
Step::Disconnected => break Ok(()),
}
}
}
}
/// Method implementations directly related with network connectivity.
impl ClientHandle {
/// Invoke a raw API call.
///
/// Using function definitions corresponding to a different layer is likely to cause the
/// responses to the request to not be understood.
///
/// <div class="stab unstable">
///
/// **Warning**: this method is **not** part of the stability guarantees of semantic
/// versioning. It **may** break during *minor* version changes (but not on patch version
/// changes). Use with care.
///
/// </div>
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::ClientHandle) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_tl_types as tl;
///
/// dbg!(client.invoke(&tl::functions::Ping { ping_id: 0 }).await?);
/// # Ok(())
/// # }
/// ```
pub async fn | invoke | identifier_name |
|
net.rs | {
/// session: Session::load_or_create("hello-world.session")?,
/// api_id: API_ID,
/// api_hash: API_HASH.to_string(),
/// params: Default::default(),
/// }).await?;
/// # Ok(())
/// # }
/// ```
pub async fn connect(mut config: Config) -> Result<Self, AuthorizationError> {
let sender = connect_sender(config.session.user_dc.unwrap_or(0), &mut config).await?;
// TODO Sender doesn't have a way to handle backpressure yet
let (handle_tx, handle_rx) = mpsc::unbounded_channel();
Ok(Self {
sender,
config,
handle_tx,
handle_rx,
})
}
/// Invoke a raw API call without the need to use a [`Client::handle`] or having to repeatedly
/// call [`Client::step`]. This directly sends the request to Telegram's servers.
///
/// Using function definitions corresponding to a different layer is likely to cause the
/// responses to the request to not be understood.
///
/// <div class="stab unstable">
///
/// **Warning**: this method is **not** part of the stability guarantees of semantic
/// versioning. It **may** break during *minor* version changes (but not on patch version
/// changes). Use with care.
///
/// </div>
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_tl_types as tl;
///
/// dbg!(client.invoke(&tl::functions::Ping { ping_id: 0 }).await?);
/// # Ok(())
/// # }
/// ```
pub async fn invoke<R: tl::RemoteCall>(
&mut self,
request: &R,
) -> Result<R::Return, InvocationError> {
self.sender.invoke(request).await
}
/// Return a new [`ClientHandle`] that can be used to invoke remote procedure calls.
///
/// # Examples
///
/// ```
/// use tokio::task;
///
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Obtain a handle. After this you can obtain more by using `client_handle.clone()`.
/// let mut client_handle = client.handle();
///
/// // Run the network loop. This is necessary, or no network events will be processed!
/// let network_handle = task::spawn(async move { client.run_until_disconnected().await });
///
/// // Use the `client_handle` to your heart's content, maybe you just want to disconnect:
/// client_handle.disconnect().await;
///
/// // Joining on the spawned task lets us access the result from `run_until_disconnected`,
/// // so we can verify everything went fine. You could also just drop this though.
/// network_handle.await?;
/// # Ok(())
/// # }
/// ```
pub fn handle(&self) -> ClientHandle {
ClientHandle {
tx: self.handle_tx.clone(),
}
}
/// Perform a single network step or processing of incoming requests via handles.
///
/// If a server message is received, requests enqueued via the [`ClientHandle`]s may have
/// their result delivered via a channel, and a (possibly empty) list of updates will be
/// returned.
///
/// The other return values are graceful disconnection, or a read error.
///
/// Most commonly, you will want to use the higher-level abstraction [`Client::next_updates`]
/// instead.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_client::NetworkStep;
///
/// loop {
/// // Process network events forever until we gracefully disconnect or get an error.
/// match client.step().await? {
/// NetworkStep::Connected { .. } => continue,
/// NetworkStep::Disconnected => break,
/// }
/// }
/// # Ok(())
/// # }
/// ```
pub async fn step(&mut self) -> Result<Step, sender::ReadError> {
let (network, request) = {
let network = self.sender.step();
let request = self.handle_rx.recv();
pin_mut!(network);
pin_mut!(request);
match future::select(network, request).await {
future::Either::Left((network, request)) => {
let request = request.now_or_never();
(Some(network), request)
}
future::Either::Right((request, network)) => {
let network = network.now_or_never();
(network, Some(request))
}
}
};
if let Some(request) = request {
let request = request.expect("mpsc returned None");
match request {
Request::Rpc { request, response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(self.sender.enqueue_body(request)));
}
Request::Disconnect { response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(()));
return Ok(Step::Disconnected);
}
}
}
// TODO request cancellation if this is Err
// (perhaps a method on the sender to cancel_all)
Ok(Step::Connected {
updates: if let Some(updates) = network {
updates?
} else {
Vec::new()
},
})
}
/// Run the client by repeatedly calling [`Client::step`] until a graceful disconnection
/// occurs, or a network error occurs. Incoming updates are ignored and simply dropped.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// client.run_until_disconnected().await?;
/// # Ok(())
/// # }
/// ```
pub async fn run_until_disconnected(mut self) -> Result<(), sender::ReadError> {
loop {
match self.step().await? {
Step::Connected { .. } => continue,
Step::Disconnected => break Ok(()),
}
}
}
}
/// Method implementations directly related with network connectivity.
impl ClientHandle {
/// Invoke a raw API call.
///
/// Using function definitions corresponding to a different layer is likely to cause the
/// responses to the request to not be understood.
///
/// <div class="stab unstable">
///
/// **Warning**: this method is **not** part of the stability guarantees of semantic
/// versioning. It **may** break during *minor* version changes (but not on patch version
/// changes). Use with care.
///
/// </div>
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::ClientHandle) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_tl_types as tl;
///
/// dbg!(client.invoke(&tl::functions::Ping { ping_id: 0 }).await?);
/// # Ok(())
/// # }
/// ```
pub async fn invoke<R: tl::RemoteCall>(
&mut self,
request: &R,
) -> Result<R::Return, InvocationError> {
let (response, rx) = oneshot::channel();
// TODO add a test for this (using a handle with the client dropped)
if let Err(_) = self.tx.send(Request::Rpc {
request: request.to_bytes(),
response,
}) {
// `Client` was dropped, can no longer send requests
return Err(InvocationError::Dropped);
}
// First receive the `oneshot::Receiver` from the `Client`,
// then `await` on that to receive the response body for the request.
if let Ok(response) = rx.await {
if let Ok(result) = response.await {
match result {
Ok(body) => R::Return::from_bytes(&body).map_err(|e| e.into()),
Err(e) => Err(e),
}
} else {
// `Sender` dropped, won't be receiving a response for this
Err(InvocationError::Dropped)
}
} else {
// `Client` dropped, won't be receiving a response for this
Err(InvocationError::Dropped)
}
}
/// Gracefully tell the [`Client`] that created this handle to disconnect and stop receiving
/// things from the network.
///
/// If the client has already been dropped (and thus disconnected), this method does nothing.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::ClientHandle) {
/// client.disconnect().await;
/// # }
/// ```
pub async fn disconnect(&mut self) {
let (response, rx) = oneshot::channel();
if let Ok(_) = self.tx.send(Request::Disconnect { response }) {
// It's fine to drop errors here, it means the channel was dropped by the `Client`.
drop(rx.await);
} else {
// `Client` is already dropped, no need to disconnect again. | } | random_line_split |
|
net.rs | 3),
(Ipv4Addr::new(149, 154, 167, 51), 443),
(Ipv4Addr::new(149, 154, 175, 100), 443),
(Ipv4Addr::new(149, 154, 167, 92), 443),
(Ipv4Addr::new(91, 108, 56, 190), 443),
];
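// (Added note) The table is indexed by DC id below (`DC_ADDRESSES[dc_id as usize]`), so the
// first entry also serves as the default address when the session has no `user_dc` stored yet.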
pub(crate) async fn connect_sender(
dc_id: i32,
config: &mut Config,
) -> Result<Sender<transport::Full, mtp::Encrypted>, AuthorizationError> | sender
};
// TODO handle -404 (we had a previously-valid authkey, but server no longer knows about it)
// TODO all up-to-date server addresses should be stored in the session for future initial connections
let _remote_config = sender
.invoke(&tl::functions::InvokeWithLayer {
layer: tl::LAYER,
query: tl::functions::InitConnection {
api_id: config.api_id,
device_model: config.params.device_model.clone(),
system_version: config.params.system_version.clone(),
app_version: config.params.app_version.clone(),
system_lang_code: config.params.system_lang_code.clone(),
lang_pack: "".into(),
lang_code: config.params.lang_code.clone(),
proxy: None,
params: None,
query: tl::functions::help::GetConfig {},
},
})
.await?;
// TODO use the dc id from the config as "this dc", not the input dc id
config.session.user_dc = Some(dc_id);
config.session.save()?;
Ok(sender)
}
/// Method implementations directly related with network connectivity.
impl Client {
/// Creates and returns a new client instance upon successful connection to Telegram.
///
/// If the session in the configuration did not have an authorization key, a new one
/// will be created and the session will be saved with it.
///
/// The connection will be initialized with the data from the input configuration.
///
/// # Examples
///
/// ```
/// use grammers_client::{Client, Config};
/// use grammers_session::Session;
///
/// // Note: these are example values and are not actually valid.
/// // Obtain your own with the developer's phone at https://my.telegram.org.
/// const API_ID: i32 = 932939;
/// const API_HASH: &str = "514727c32270b9eb8cc16daf17e21e57";
///
/// # async fn f(mut client: Client) -> Result<(), Box<dyn std::error::Error>> {
/// let client = Client::connect(Config {
/// session: Session::load_or_create("hello-world.session")?,
/// api_id: API_ID,
/// api_hash: API_HASH.to_string(),
/// params: Default::default(),
/// }).await?;
/// # Ok(())
/// # }
/// ```
pub async fn connect(mut config: Config) -> Result<Self, AuthorizationError> {
let sender = connect_sender(config.session.user_dc.unwrap_or(0), &mut config).await?;
// TODO Sender doesn't have a way to handle backpressure yet
let (handle_tx, handle_rx) = mpsc::unbounded_channel();
Ok(Self {
sender,
config,
handle_tx,
handle_rx,
})
}
/// Invoke a raw API call without the need to use a [`Client::handle`] or having to repeatedly
/// call [`Client::step`]. This directly sends the request to Telegram's servers.
///
/// Using function definitions corresponding to a different layer is likely to cause the
/// responses to the request to not be understood.
///
/// <div class="stab unstable">
///
/// **Warning**: this method is **not** part of the stability guarantees of semantic
/// versioning. It **may** break during *minor* version changes (but not on patch version
/// changes). Use with care.
///
/// </div>
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_tl_types as tl;
///
/// dbg!(client.invoke(&tl::functions::Ping { ping_id: 0 }).await?);
/// # Ok(())
/// # }
/// ```
pub async fn invoke<R: tl::RemoteCall>(
&mut self,
request: &R,
) -> Result<R::Return, InvocationError> {
self.sender.invoke(request).await
}
/// Return a new [`ClientHandle`] that can be used to invoke remote procedure calls.
///
/// # Examples
///
/// ```
/// use tokio::task;
///
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// // Obtain a handle. After this you can obtain more by using `client_handle.clone()`.
/// let mut client_handle = client.handle();
///
/// // Run the network loop. This is necessary, or no network events will be processed!
/// let network_handle = task::spawn(async move { client.run_until_disconnected().await });
///
/// // Use the `client_handle` to your heart's content, maybe you just want to disconnect:
/// client_handle.disconnect().await;
///
/// // Joining on the spawned task lets us access the result from `run_until_disconnected`,
/// // so we can verify everything went fine. You could also just drop this though.
/// network_handle.await?;
/// # Ok(())
/// # }
/// ```
pub fn handle(&self) -> ClientHandle {
ClientHandle {
tx: self.handle_tx.clone(),
}
}
/// Perform a single network step or processing of incoming requests via handles.
///
/// If a server message is received, requests enqueued via the [`ClientHandle`]s may have
/// their result delivered via a channel, and a (possibly empty) list of updates will be
/// returned.
///
/// The other return values are graceful disconnection, or a read error.
///
/// Most commonly, you will want to use the higher-level abstraction [`Client::next_updates`]
/// instead.
///
/// # Examples
///
/// ```
/// # async fn f(mut client: grammers_client::Client) -> Result<(), Box<dyn std::error::Error>> {
/// use grammers_client::NetworkStep;
///
/// loop {
/// // Process network events forever until we gracefully disconnect or get an error.
/// match client.step().await? {
/// NetworkStep::Connected { .. } => continue,
/// NetworkStep::Disconnected => break,
/// }
/// }
/// # Ok(())
/// # }
/// ```
pub async fn step(&mut self) -> Result<Step, sender::ReadError> {
let (network, request) = {
let network = self.sender.step();
let request = self.handle_rx.recv();
pin_mut!(network);
pin_mut!(request);
match future::select(network, request).await {
future::Either::Left((network, request)) => {
let request = request.now_or_never();
(Some(network), request)
}
future::Either::Right((request, network)) => {
let network = network.now_or_never();
(network, Some(request))
}
}
};
if let Some(request) = request {
let request = request.expect("mpsc returned None");
match request {
Request::Rpc { request, response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(self.sender.enqueue_body(request)));
}
Request::Disconnect { response } => {
// Channel will return `Err` if the `ClientHandle` lost interest, just drop the error.
drop(response.send(()));
return Ok(Step::Disconnected);
}
}
}
// TODO request cancellation if this is Err
// (perhaps a method on the sender to cancel_all)
Ok(Step::Connected {
updates: if let Some(updates) = network {
updates?
} else {
Vec::new()
},
})
}
/// Run the client by repeatedly calling [`Client::step`] until a graceful disconnection
/// occurs, or | {
let transport = transport::Full::new();
let addr = DC_ADDRESSES[dc_id as usize];
let mut sender = if let Some(auth_key) = config.session.auth_key.as_ref() {
info!(
"creating a new sender with existing auth key to dc {} {:?}",
dc_id, addr
);
sender::connect_with_auth(transport, addr, auth_key.clone()).await?
} else {
info!(
"creating a new sender and auth key in dc {} {:?}",
dc_id, addr
);
let sender = sender::connect(transport, addr).await?;
config.session.auth_key = Some(sender.auth_key().clone());
config.session.save()?; | identifier_body |
Props.ts | cn ms, 两次提交间隔时长(防止重复提交)
* @default 1000
*/
throttle?: number
/**
* @en bind form ref, Can call some form methods
* @cn 绑定 form 的引用, 可以调用某些 form 的方法
* @override
*/
formRef?: ((form: FormRef<Value>) => void) | { current?: FormRef<Value> }
/**
* @inner 内部属性
*/
error?: ObjectType<string | Error>
}
/** ----------------formItem-----------------------* */
export interface ItemProps extends StandardProps {
/**
* @inner 待废弃
*/
grid?: number | { width: number; offset: number; response: 'sm' | 'md' | 'lg' | 'xl' }
/**
* @en Single-line error prompt will not stretch the page height
* @cn 单行错误提示不撑开页面高度
* @default false
*/
keepErrorHeight?: boolean
/**
* @en When it is undefined, the label is not rendered and does not occupy space. If there is no content but the space should still be occupied, use an empty string ''.
* @cn 未定义时,标签不会 render,也不会占位。如果无内容需要占位,使用空字符串 ''。
*/
label?: React.ReactNode
/**
* @en The default is empty, follow the theme style.
* @cn 默认为空,跟随主题样式。
*/
labelAlign?: 'top' | 'left' | 'right'
/**
* @en The width of label. It is invalid when labelAlign is 'top'.
* @cn 标签宽度,labelAlign 为 'top' 时无效。
* @default 140px
*/
labelWidth?: string | number
/**
* @en Required mark, for display only; does not trigger validation
* @cn 必填标记,纯展示用,不会触发校验
* @default false
*/
required?: boolean
/**
* @en Prompting information
* @cn 提示文案
*/
tip?: React.ReactNode
formDatum?: FormDatum<any>
/**
* @en label vertical align
* @cn 标签垂直方向对齐方式
* @default "top"
*/
labelVerticalAlign?: 'top' | 'middle' | 'bottom'
/**
* @en form element
* @cn 表单元素
*/
children?: React.ReactNode
}
/** ----------------formButton-----------------------* */
export interface FormButtonProps extends Omit<ButtonProps, 'htmlType' | 'onRef'> {
onClick?: () => void
type?: RegularAttributes.Type
}
/** ----------------Field-----------------------* */
export interface FieldChildrenFunc<Value = any> {
(params: { value: Value; error?: Error; onChange: (value: Value) => void; disabled?: boolean }): React.ReactNode
}
export interface FieldProps<Value> {
/**
* @inner 内部属性
*/
cache?: boolean
/**
* @inner 内部属性
*/
onChange: (value: Value) => void
/**
* @inner 内部属性
*/
value: Value
/**
* @inner 内部属性
*/
error?: Error
/**
* @inner 内部属性
*/
disabled?: boolean
/**
* @en A React component that supports value and onChange, or a function. The function receives an object with the following properties:
* value: The value obtained from the parent Form or Form.Block by name.
* error: the error information of data validation. type is Error.
* onChange: The callback when the value is changing.
* disabled: inherit the disabled attribute of Form.
*
* @cn 支持 value 和 onChange 的 React 组件,或者函数,函数object属性如下
* value: 根据 name 从上级 Form 或 Form.Block 获取的值
* error:数据校验错误信息,类型为 Error
* onChange: 值改变回调函数
* disabled: 继承 Form 的 disabled 属性
*
* @override ((opts: object) => ReactNode) | ReactNode
*/
children: React.ReactNode | FieldChildrenFunc<Value>
}
/** ----------------FieldError-----------------------* */
export interface FieldErrorProps {
error?: Error | Error[]
}
/** ----------------Flow-----------------------* */
export interface FlowProps {
/**
* @en datum is the object of Datum.Form.
* @cn datum 为 Datum.Form 对象
* @override (datum: FormDatum) => ReactNode
*/
children: (datum: FormDatum<ObjectType>) => ReactNode
formDatum: FormDatum<ObjectType>
/**
* @en Specifying which fields to change trigger the Flow update.
* @cn names 为空时,Form 内任意值变化会触发 Flow 更新;不为空时,只监听指定字段变化
*/
names?: string[]
}
/** ----------------index-----------------------* */
// Form
export type FormCardConsumerKey = 'setFormStatus'
export type FormDatumKey = 'removeUndefined' | 'error'
export type FormPropsWithCardConsumer<Value> = CardConsumerType<SimpleFormProps<Value>, FormCardConsumerKey>
export type FormPropsWithProvider<Value> = GetFormProviderProps<FormPropsWithCardConsumer<Value>, Value>
/**
* @title Form
*/
export type FormProps<Value> = GetDatumFormProps<FormPropsWithProvider<Value>, Value, FormDatumKey>
export declare class Form<Value> extends React.Component<FormProps<Value>, any> {
static Item: typeof FormItem
static Field: typeof FormField
static Flow: typeof FormFlow
static FieldSet: typeof FormFieldSet
static Submit: React.ComponentType<FormButtonProps>
static Reset: React.ComponentType<FormButtonProps>
static Button: React.ComponentType<FormButtonProps>
static formConsumer: any
static useMode: (...args: string[]) => React.ComponentType<FormModeProps>[]
render: () => JSX.Element
}
export type FormCompType = typeof Form
// FormItem
export type FormItemWithFormConsumerKeys =
| 'formDatum'
| 'labelWidth'
| 'labelAlign'
| 'labelVerticalAlign'
| 'keepErrorHeight'
/**
* @title Form.Item
* @cn 表单项,主要用来布局,显示标签,提示文案信息等
* @en Used to layout, display labels, tips, errors, etc
*/
export type FormItemProps = GetFormConsumerProps<ItemProps, FormItemWithFormConsumerKeys>
export declare class FormItem extends React.Component<FormItemProps, any> {
render: () => JSX.Element
}
/**
* @title Form.Field
* @en Used to handle custom form components, enabling custom form components to get/store/validate value from formdata by name.
* @cn 用于处理自定义表单组件,使自定义表单组件实现通过rules校验,存储数据功能
*/
export type FormFieldProps<Value> = GetInputableProps<FieldProps<Value>, Value>
export declare class FormField<Value> extends React.Component<FormFieldProps<Value>, any> {
render: () => JSX.Element
}
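// --- Added usage sketch (illustrative only, not part of the original typings) ---
// A Form.Field with a function child receives the object described by FieldChildrenFunc above:
// <Form.Field name="age">
//   {({ value, onChange, error, disabled }) => (
//     <input
//       value={value ?? ''}
//       disabled={disabled}
//       onChange={e => onChange(Number(e.target.value))}
//       aria-invalid={!!error}
//     />
//   )}
// </Form.Field>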
export type FormFlowWithFormConsumerKeys = 'formDatum'
export type FormFieldSetWithFormConsumerKeys = 'formDatum'
/**
* @title Form.FieldSet
* @en Handles a set (group) of form data addressed by name
* @cn 用来处理 object 类型 字段和数组。
*/
export type FormFieldSetProps<Value> = GetFormConsumerProps<FieldSetProps<Value>, FormFlowWithFormConsumerKeys>
export declare class FormFieldSet<Value> extends React.Component<FormFieldSetProps<Value>, any> {
render: () => JSX.Element
}
/**
* @title Form.Flow
*/
export type FormFlowProps = GetFormConsumerProps<FlowProps, FormFlowWithFormConsumerKeys>
export declare class FormFlow extends React.Component<FormFlowProps, any> {
render: () => JSX.Element
}
/**
* @title FormRef
* @en Form instance method
* @cn Form 实例的一些方法
* @isDetail true
*/
export interface FormRef<Value> {
/**
* @en return form value
* @cn 返回表单的值
*/
getValue: () => Value
/**
* @en Validate form
* @cn 校验表单
*/
validate: () => Promise<any>
/**
* @en Validation form fields
* @cn 校验表单指定字段
*/
validateFields: (fields: string | string[]) => Promise<any>
/**
* @en Validation errors can be caught through Promise.catch
* @cn 校验可以通过 catch 获取报错信息
*/
validateFieldsWithError: (fields: string | string[]) => Promise<any>
/**
* @en Clear check
* @cn 清除校验
*/
clearValidate: () => void
/**
* @en Submit Form, withValidate: Whether to verify
* @cn 提交表单, withValidate: 是否校验
*/
submit: (withValidate?: boolean) => void
/**
* @en reset form
* @cn 重置表单
*/
reset: () => void
}
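// --- Added usage sketch (illustrative only, not part of the original typings) ---
// Capturing the FormRef described above through the `formRef` prop; `MyValue` is a placeholder type:
// const formRef: { current?: FormRef<MyValue> } = {}
// <Form<MyValue> formRef={formRef}> ...fields... </Form>
// // later, e.g. in a toolbar handler, only the documented members are used:
// formRef.current?.validate().then(() => formRef.current?.submit())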
| identifier_name |
||
Props.ts | import { CardConsumerType } from '../Card/Props'
import { GetDatumFormProps } from '../Datum/Props'
export interface RuleObject {
[name: string]: FormItemRule<any> | RuleObject
}
/** ----------------fieldSet-----------------------* */
export interface FieldSetChildrenFunc<Value = any> {
(
params: {
list: any
value: Value
onChange: (value: Value[]) => void
onRemove: () => void
index: number
onInsert: (value: Value) => void
onAppend: (value: Value) => void
error: Error[]
datum: FormDatum<ObjectType>
}
): React.ReactNode
}
export interface FieldSetProviderValueType {
path?: string
val?: () => Promise<FormError | true>
}
export interface FieldSetProps<Value = any> {
/**
* @en default Value
* @cn 默认值
*/
defaultValue?: Value[]
/**
* @en Show content when data is empty. (only valid when children is function)
* @cn 数据为空时展示内容。(仅在children为function时有效)
*/
empty?: (onInsert: (value: Value) => void) => React.ReactNode
formDatum: FormDatum<ObjectType>
/**
* @en The name used to access data from the form
* @cn 从 Form 中存取数据的名称
*/
name: string
/**
* @inner 内部属性
*/
onError?: (error?: Error) => void
/**
* @en Validation rules
* @cn 校验规则
* @override RuleItem[]
*/
rules?: FormItemRule<any>
/**
* @en When children is not a function, it handles object-type data.
* When children is a function, it handles array data. The options properties are:
* list: all data of name.
* value: a single piece of data for the value obtained by name.
* onChange: a callback when the value is changing.
* onRemove: a callback when a child component is removed.
* index: the current index.
* onInsert: Insert a piece of data before the current item.
* onAppend: Insert a piece of data after the current item.
*
* @cn children 不为 function,用来处理 object 类型数据,children 内的 name 会拼接 FieldSet name,如 FieldSet name 为 'a', children 元素name 为 b,children 实际处理的数据为 a.b;
* children 为 function 时,用来处理数组数据。options 属性为
* list: name 下的全部数据。
* value: 根据name获取的值的单条数据。
* onChange: 子组件数据改变回调。
* onRemove: 子组件删除回调。
* index: 当前项索引。
* onInsert: 在当前项之前插入一条数据。
* onAppend: 在当前项之后附加一条数据。
*
* @override ((opts: object) => ReactNode) |ReactNode
*/
children: FieldSetChildrenFunc<Value> | React.ReactNode
}
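// --- Added usage sketch (illustrative only, not part of the original typings) ---
// A FieldSet with a function child is called once per array item; only the documented
// callback arguments are used here, and the inner <input> is a stand-in for any control:
// <Form.FieldSet name="phones">
//   {({ value, index, onChange, onRemove, onAppend }) => (
//     <div key={index}>
//       <input value={value} onChange={e => onChange([e.target.value])} />
//       <button onClick={onRemove}>remove</button>
//       <button onClick={() => onAppend('')}>add after</button>
//     </div>
//   )}
// </Form.FieldSet>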
export type GetFieldSetConsumerProps<U> = Omit<U, 'innerFormNamePath' | 'fieldSetValidate'>
/** ----------------formContext-----------------------* */
export interface FormContextValue {
formDatum: FormDatum<ObjectType>
formMode?: string
disabled?: any
labelAlign?: 'top' | 'left' | 'right'
labelVerticalAlign?: 'top' | 'middle' | 'bottom'
size?: RegularAttributes.Size
labelWidth?: string | number
combineRules: <U>(name: string, rule: FormItemRule<U>) => FormItemRule<U>
keepErrorHeight?: boolean
}
export interface FormProviderProps<V extends ObjectType> {
datum: FormDatum<V>
disabled?: boolean
/**
* @en The default is empty, follow the theme style.
* @cn 默认为空,跟随主题样式。
*/
labelAlign?: 'top' | 'left' | 'right'
/**
* @en the default value is top.
* @cn 默认顶部对齐
* @default 'top'
*/
labelVerticalAlign?: 'top' | 'middle' | 'bottom'
/**
* @en Form element size
* @cn 表单元素的尺寸
* @default 'default'
* @override union
*/
size?: RegularAttributes.Size
/**
* @en The width of label. It is invalid when labelAlign is 'top'.
* @cn 标签宽度,labelAlign 为 'top' 时无效。
* @default 140px
*/
labelWidth?: number | string
/**
* @en mode, with useMode
* @cn 模式,和 useMode 配合使用
*/
mode?: string
pending?: boolean
rules?: RuleObject
/**
* @en Single-line error prompt will not stretch the page height
* @cn 单行错误提示不撑开页面高度
* @default false
*/
keepErrorHeight?: boolean
}
export type FormContextKey = keyof FormContextValue
export type GetFormConsumerProps<U, Keys extends FormContextKey> = Omit<
PartialKeys<U, Keys>,
'formDatum' | 'combineRules'
>
export type GetFormProviderProps<U, Value> = ForceAdd<U, FormProviderProps<Value>>
/** ----------------formItemContext-----------------------* */
export interface FormItemContextValue {
onItemError?: (id: string, error?: Error) => void
bindInputToItem?: (name: string) => void
unbindInputFromItem?: (name: string) => void
}
export type GetFormItemConsumerProps<U> = Omit<U, keyof FormItemContextValue>
/** ----------------inputable-----------------------* */
export interface BaseInputProps {
value?: any
onChange?: any
}
export interface InputableProps<Value> {
/**
* @en Callback invoked before the value changes; if it returns a non-empty value, that value is used as the component's new value
* @cn 值改变前的回调,当返回值不为空时将作为组件的新值
* @override (value: any , datum?: FormDatum) => any
*/
beforeChange?: (value: Value | undefined, datum?: FormDatum<ObjectType>) => Value | undefined | void
onChange?: (value: Value | undefined, ...rest: any) => void
/**
* @en rules validation callback
* @cn rules 校验回调
*/
onError?: (e?: Error) => void
/**
* @inner 内部属性
*/
popover?: string
/**
* @en value
* @cn 值
*/
value?: Value
/**
* @inner 内部属性
*/
error?: Error
// readOnly?: boolean
disabled?: boolean
/**
* @en onChange is not triggered when two selected values are the same
* @cn 当两次选择的值相同时不触发 onChange
* @default false
*/
filterSameChange?: boolean
combineRules?: (name: string, rules?: FormItemRule<Value>) => FormItemRule<Value>
required?: boolean
/**
* @en When the value changes, the fields listed in bind are validated as well; needs to be used together with Form
* @cn 当值改变是会联动校验 bind 中的字段, 需要配合 Form 使用
*/
bind?: string[]
onItemError?: (id: string, error?: Error) => void
bindInputToItem?: (name: string) => void
unbindInputFromItem?: (name: string) => void
/**
* @inner 内部属性
*/
scuSkip?: string[]
/**
* @en The default value; same type as value
* @cn 默认值 和 value 类型相同
*/
defaultValue?: Value
/**
* @en If set to true, the form will not automatically remove the data after the component is unmounted
* @cn 设置为 true 组件卸载后表单不自动删除数据
*/
reserveAble?: boolean
/**
* @en Validation rules, see [Rule](/components/rule) usage for details
* @cn 校验规则 详见 [Rule](/components/rule)
* @override RuleItem[]
*/
rules?: FormItemRule<Value>
/**
* @inner 内部属性
*/
formDatum?: FormDatum<ObjectType>
fieldSetValidate?: (validator: boolean) => void
/**
* @en Form field, used with Form
* @cn 表单字段, 配合 Form 使用
*/
name?: string | string[]
/**
* @inner 内部属性
*/
forceChangeOn | import { FormError } from '../utils/errors'
import FormDatum from '../Datum/Form'
import { ForceAdd, ObjectType, StandardProps, RegularAttributes, PartialKeys } from '../@types/common'
import { FormItemRule } from '../Rule/Props'
import { ButtonProps } from '../Button/Props' | random_line_split |
|
views.py | products = Product.objects.values('title', 'balance', 'price')
result['Список товаров с остатками и ценами'] = list(products)
# Суммарная стоимость всех товаров на складе
total_cost = Product.objects.aggregate(summary_cost=Sum(F('balance')*F('price')))
result['Суммарная стоимость всех товаров на складе'] = total_cost['summary_cost']
# Количество заказов
orders_count = Order.objects.count()
result['Общее количество заказов в базе'] = orders_count
# Список заказов с суммой каждого заказа (можно ли дать имя вычисляемому полю? спросить у Жени)
orders_list = Order.objects.values_list(
'client__title',
'product__title',
'dt_create',
'count',
F('count')*F('product__price')
)
orders_list = list(orders_list)
orders_list.sort(reverse=True, key=lambda e: e[4])
result['Список заказов с суммой каждого заказа'] = orders_list
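# (Added note) The computed column can also be named directly on the queryset, letting the
# database sort instead of the Python-side sort above, e.g.:
#   Order.objects.annotate(total=F('count') * F('product__price'))
#       .order_by('-total')
#       .values_list('client__title', 'product__title', 'dt_create', 'count', 'total')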
# Среднее количество товаров в заказе
avg_count_in_order = Order.objects.aggregate(Avg('count'))['count__avg']
result['Среднее количество товаров в заказе'] = avg_count_in_order
# Получаем список клиентов и для каждого подсчитываем сумму сделанных им заказов
# Клиенты, не сделавшие ни одного заказа - отсекаются (filter(sum_counts__isnull=False))
# Результаты сортируются по возрастанию суммы заказа
clients_order_cost = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False).order_by('sum_counts')
result['Список клиентов и сумма их заказов'] = [
{client.title: client.sum_counts} for client in clients_order_cost
]
# Получаем список клиентов, превысивших свой кредитный лимит
clients_over_limit = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False, sum_counts__gt=F('credit_limit'))
result['Список клиентов, превысивших свой кредитный лимит'] = [
{
'Клиент': client.title,
'Превышение лимита': (client.sum_counts-client.credit_limit)
} for client in clients_over_limit
]
# Получаем товары, которых заказано больше их наличия на складе
products_over_balance = Product.objects.annotate(
sum_counts=Sum('order__count')
).filter(sum_counts__isnull=False, sum_counts__gt=F('balance'))
result['Список товаров, которых заказано больше их количества на складе'] = [
{
'Товар': product.title,
'Превышение количества': (product.sum_counts - product.balance)
} for product in products_over_balance
]
# Пример использования класса Prefetch: выбор всех клиентов, которые заказывали видеокарты
video_pr = Prefetch(
'order_set',
queryset=Order.objects.filter(product__title__istartswith='Видеокарта'),
to_attr='video_orders'
)
clients_videocard = Client.objects.prefetch_related(video_pr).all()
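# (Added note) Because the Prefetch above sets to_attr='video_orders', the prefetched rows are
# exposed on each client as a plain list (client.video_orders) rather than a queryset, which is
# why the truthiness check `if client.video_orders` below works without extra queries.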
result['Клиенты, заказавшие видеокарты'] = [
{
'Клиент': client.title,
'Заказы': [
order.__str__() for order in client.video_orders
]
} for client in clients_videocard if client.video_orders
]
return JsonResponse(result)
@require_GET
def spec_stat(request, stat_type):
# Получаем сумму всех кредитных лимитов клиентов
if stat_type == 'sum_limits':
sum_limits = Client.objects.aggregate(sum=Sum('credit_limit'))['sum']
result = {'Сумма кредитных лимитов клиентов': sum_limits}
# Получаем сумму кредитных лимитов VIP-клиентов
if stat_type == 'sum_limits_vip':
sum_limits_vip = Client.objects.aggregate(
sum=Sum('credit_limit', filter=Q(vip=True))
)['sum']
result = {'Сумма кредитных лимитов VIP-клиентов': sum_limits_vip}
# Получаем всех клиентов, у которых кредитный лимит выше среднего
if stat_type == 'limit_over_avg':
avg_limit = Client.objects.aggregate(avg=Avg('credit_limit'))['avg']
clients = Client.objects.filter(
credit_limit__gt=avg_limit
).values_list(
'title', flat=True
)
result = {'Клиенты, у которых кредитный лимит выше среднего': list(clients)}
# Клиенты и список их заказов
if stat_type == 'clients_and_orders':
clients = Client.objects.prefetch_related('order_set').all()
result = {}
for client in clients:
result[client.title] = list(client.order_set.all().values('product__title', 'count'))
# vip-клиенты и список их заказов, включающих видеокарты
if stat_type == 'vip_clients_and_orders_video':
pr = Prefetch('order_set', queryset=Order.objects.filter(product__title__contains='Видеокарта'))
clients = Client.objects.prefetch_related(pr).filter(vip=True)
result = {}
for client in clients:
order_list = list(client.order_set.values_list('product__title', 'count'))
if order_list:
result[client.title] = order_list
# Полная стоимость каждого товара на складе
if stat_type == 'products_cost':
products_with_coast = Product.objects.annotate(coast=F('price')*F('balance'))
result = {'Список товаров': [
{
'Товар': p.title,
'Полная стоимость (цена*остаток)': p.coast
} for p in products_with_coast
]}
print(result)
return JsonResponse(result)
@api_view(['GET'])
def get_clients(request):
clients = Client.objects.all()
serializer = SimpleClientSerializer(clients, many=True)
return Response(serializer.data)
@api_view(['POST'])
def create_client(request):
serializer = SimpleClientSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response('Запрос принят и успешно обработан')
else:
return Response(serializer.errors)
@api_view(['GET'])
def get_detailed_clients(request):
clients = Client.objects.all()
serializer = ClientSerializer(clients, many=True)
return Response(serializer.data)
@api_view(['GET', 'POST'])
def person_demo(request):
if request.method == 'GET':
person = Person('Сергей Лебидко', 'm', 35, '[email protected]')
serializer = PersonSerializer(person)
return Response([serializer.data])
if request.method == 'POST':
person_for_update = Person(name='Вася Пупкин', gender='m', age=15, email='[email protected]')
original_data = str(person_for_update)
serializer = PersonSerializer(data=request.data, instance=person_for_update)
if serializer.is_valid():
return Response({
'Данные до обновления экземпляра': original_data,
'Валидированные данные': serializer.validated_data,
'Данные после обновления экземпляра': str(serializer.save())
})
else:
return Response(serializer.errors)
@api_view(['GET'])
def get_urls_list(request):
from .urls import urlpatterns
return Response([str(url) for url in urlpatterns])
class ProductViewSet(ModelViewSet):
queryset = Product.objects.all()
serializer_class = ProductSerializer
class ClientViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin, CreateModelMixin, DestroyModelMixin):
queryset = Client.objects.all()
serializer_class = SimpleClientSerializer
class OrderViewSet(ModelViewSet):
queryset = Order.objects.all()
serializer_class = OrderSerializer
# Класс-контроллер входа на сайт
class Login(LoginView):
template_name = 'main/login.html'
# Контроллер отображает страницу текущего пользователя
@require_GET
def show_current_user(request):
return render(request, 'main/current_user.html', {})
# Класс-контроллер выхода с сайта
class Logout(LogoutView):
next_page = reverse_lazy('show_current_user')
@api_view(['GET'])
@permission_classes([MyPermission])
def test_permission(request):
return Response('Тест разрешений выполнен успешно')
@api_view(['GET'])
def clear_orders(request):
count = Order.objects.all().delete()[0]
return Response(f'Удалено заказов: {count}')
# Контроллер для быстрого тестирования различных фишек django / drf
# @api_view(['GET'])
def test(request):
from django.http import HttpResponse
return HttpR | esponse(request.GET['dt'])
| conditional_block |
|
views.py | список товаров
products = Product.objects.values('title', 'balance', 'price')
result['Список товаров с остатками и ценами'] = list(products)
# Суммарная стоимость всех товаров на складе
total_cost = Product.objects.aggregate(summary_cost=Sum(F('balance')*F('price')))
result['Суммарная стоимость всех товаров на складе'] = total_cost['summary_cost']
# Количество заказов
orders_count = Order.objects.count()
result['Общее количество заказов в базе'] = orders_count
# Список заказов с суммой каждого заказа (можно ли дать имя вычисляемому полю? спросить у Жени)
orders_list = Order.objects.values_list(
'client__title',
'product__title',
'dt_create',
'count',
F('count')*F('product__price')
)
orders_list = list(orders_list)
orders_list.sort(reverse=True, key=lambda e: e[4])
result['Список заказов с суммой каждого заказа'] = orders_list
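# (Added note) The computed column can also be named directly on the queryset, letting the
# database sort instead of the Python-side sort above, e.g.:
#   Order.objects.annotate(total=F('count') * F('product__price'))
#       .order_by('-total')
#       .values_list('client__title', 'product__title', 'dt_create', 'count', 'total')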
# Среднее количество товаров в заказе
avg_count_in_order = Order.objects.aggregate(Avg('count'))['count__avg']
result['Среднее количество товаров в заказе'] = avg_count_in_order
# Получаем список клиентов и для каждого подсчитываем сумму сделанных им заказов
# Клиенты, не сделавшие ни одного заказа - отсекаются (filter(sum_counts__isnull=False))
# Результаты сортируются по возрастанию суммы заказа
clients_order_cost = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False).order_by('sum_counts')
result['Список клиентов и сумма их заказов'] = [
{client.title: client.sum_counts} for client in clients_order_cost
]
# Получаем список клиентов, превысивших свой кредитный лимит
clients_over_limit = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False, sum_counts__gt=F('credit_limit'))
result['Список клиентов, превысивших свой кредитный лимит'] = [
{
'Клиент': client.title,
'Превышение лимита': (client.sum_counts-client.credit_limit)
} for client in clients_over_limit
]
# Получаем товары, которых заказано больше их наличия на складе
products_over_balance = Product.objects.annotate(
sum_counts=Sum('order__count')
).filter(sum_counts__isnull=False, sum_counts__gt=F('balance'))
result['Список товаров, которых заказано больше их количества на складе'] = [
{
'Товар': product.title,
'Превышение количества': (product.sum_counts - product.balance)
} for product in products_over_balance
]
# Пример использования класса Prefetch: выбор всех клиентов, которые заказывали видеокарты
video_pr = Prefetch(
'order_set',
queryset=Order.objects.filter(product__title__istartswith='Видеокарта'),
to_attr='video_orders'
)
clients_videocard = Client.objects.prefetch_related(video_pr).all()
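# (Added note) Because the Prefetch above sets to_attr='video_orders', the prefetched rows are
# exposed on each client as a plain list (client.video_orders) rather than a queryset, which is
# why the truthiness check `if client.video_orders` below works without extra queries.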
result['Клиенты, заказавшие видеокарты'] = [
{
'Клиент': client.title,
'Заказы': [
order.__str__() for order in client.video_orders
]
} for client in clients_videocard if client.video_orders
]
return JsonResponse(result)
@require_GET
def spec_stat(request, stat_type):
# Получаем сумму всех кредитных лимитов клиентов
if stat_type == 'sum_limits':
sum_limits = Client.objects.aggregate(sum=Sum('credit_limit'))['sum']
result = {'Сумма кредитных лимитов клиентов': sum_limits}
# Получаем сумму кредитных лимитов VIP-клиентов
if stat_type == 'sum_limits_vip':
sum_limits_vip = Client.objects.aggregate(
sum=Sum('credit_limit', filter=Q(vip=True))
)['sum']
result = {'Сумма кредитных лимитов VIP-клиентов': sum_limits_vip}
# Получаем всех клиентов, у которых кредитный лимит выше среднего
if stat_type == 'limit_over_avg':
avg_limit = Client.objects.aggregate(avg=Avg('credit_limit'))['avg']
clients = Client.objects.filter(
credit_limit__gt=avg_limit
).values_list(
'title', flat=True
)
result = {'Клиенты, у которых кредитный лимит выше среднего': list(clients)}
# Клиенты и список их заказов
if stat_type == 'clients_and_orders':
clients = Client.objects.prefetch_related('order_set').all()
result = {}
for client in clients:
result[client.title] = list(client.order_set.all().values('product__title', 'count'))
# vip-клиенты и список их заказов, включающих видеокарты
if stat_type == 'vip_clients_and_orders_video':
pr = Prefetch('order_set', queryset=Order.objects.filter(product__title__contains='Видеокарта'))
clients = Client.objects.prefetch_related(pr).filter(vip=True)
result = {}
for client in clients:
order_list = list(client.order_set.values_list('product__title', 'count'))
if order_list:
result[client.title] = order_list
# Полная стоимость каждого товара на складе
if stat_type == 'products_cost':
products_with_coast = Product.objects.annotate(coast=F('price')*F('balance'))
result = {'Список товаров': [
{
'Товар': p.title,
'Полная стоимость (цена*остаток)': p.coast
} for p in products_with_coast
]}
print(result)
return JsonResponse(result)
@api_view(['GET'])
def get_clients(request):
clients = Client.objects.all()
serializer = SimpleClientSerializer(clients, many=True)
return Response(serializer.data)
@api_view(['POST'])
def create_client(request):
serializer = SimpleClientSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response('Запрос принят и успешно обработан')
else:
return Response(serializer.errors)
@api_view(['GET'])
def get_detailed_clients(request):
clients = Client.objects.all()
serializer = ClientSerializer(clients, many=True)
return Response(serializer.data)
@api_view(['GET', 'POST'])
def person_demo(request):
if request.method == 'GET':
person = Person('Сергей Лебидко', 'm', 35, '[email protected]')
serializer = PersonSerializer(person)
return Response([serializer.data])
if request.method == 'POST':
person_for_update = Person(name='Вася Пупкин', gender='m', age=15, email='[email protected]')
original_data = str(person_for_update)
serializer = PersonSerializer(data=request.data, instance=person_for_update)
if serializer.is_valid():
return Response({
'Данные до обновления экземпляра': original_data,
'Валидированные данные': serializer.validated_data,
'Данные после обновления экземпляра': str(serializer.save())
})
else:
return Response(serializer.errors)
@api_view(['GET'])
def get_urls_list(request):
from .urls import urlpatterns
return Response([str(url) for url in urlpatterns])
class ProductViewSet(ModelViewSet):
queryset = Product.objects.all()
serializer_class = ProductSerializer
class ClientViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin, CreateModelMixin, DestroyModelMixin):
queryset = Client.objects.all()
serializer_class = SimpleClientSerializer
class OrderViewSet(ModelViewSet):
queryset = Order.objects.all()
serializer_class = OrderSerializer
# Класс-контроллер входа на сайт
class Login(LoginView):
template_name = 'main/login.html'
# Контроллер отображает страницу текущего пользователя
@require_GET
def show_current_user(request):
return render(request, 'main/current_user.html', {})
# Класс-контроллер выхода с сайта
class Logout(LogoutView):
next_page = reverse_lazy('show_current_user')
@api_view(['GET'])
@permission_classes([MyPermission])
def test_permission(request):
return Response('Тест разрешений выполнен успешно')
@api_view(['GET'])
def clear_orders(request):
count = Order.objects.all().delete()[0]
return Response(f'Удалено заказов: {count}')
# Контроллер для быстрого тестирования различных фишек django / drf
# @api_view(['GET'])
def test(request):
from django.http import HttpResponse
return HttpResponse(request.GET['dt'])
| identifier_name |
||
views.py | их заказов'],
['GET /spec_stat/vip_clients_and_orders_video/', 'vip-клиенты и список их заказов, включающих видеокарты'],
['GET /spec_stat/products_cost/', 'Список товаров с их полной стоимостью (цена*количество)'],
['GET /get_clients/', 'Список клиентов (формируется с помощью DRF)'],
['GET /create_client/', 'Создать клиента (формируется с помощью DRF)'],
['GET /test/', 'Хук для тестовых запросов'],
['GET или POST /person_demo/', 'Хук для тестирования работы с сериализатором обычного объекта'],
['GET /get_detailed_clients/', 'Выводит список клиентов, совмещенный со списками id их заказов'],
['GET /get_url_list/', 'Выводит список всех элементов списка urlpatterns'],
]
}
return render(request, 'main/hooks.html', context)
@require_GET
def create_orders(request):
# Получаем количество заказов, которые будем формировать
orders_count = request.GET.get('orders_count')
if not orders_count:
orders_count = request.default_orders_count
orders_count = int(orders_count)
# Предварительно очищаем БД от старых заказов
Order.objects.all().delete() | clients = Client.objects.all()
products = Product.objects.all()
# Добавляем новые заказы
today = date.today()
for i in range(0, orders_count):
client = random.choice(clients)
product = random.choice(products)
t_delta = timedelta(days=random.randint(0, 10))
dt_create = today - t_delta
count = random.randint(1, 10)
Order.objects.create(
client=client,
product=product,
dt_create=dt_create,
count=count
)
result = {'Создано заказов': str(orders_count)}
return JsonResponse(result)
@require_GET
def statistic(request):
result = {}
# values возвращает queryset, содержащий словари
# clients = Client.objects.values('title')
# result['Список клиентов (получен с помощью values)'] = list(clients)
# values_list возвращает queryset, содержащий кортежи
# clients = Client.objects.values_list('title')
# result['Список клиентов (получен с помощью values_list (flat=False))'] = list(clients)
# values_list с параметром flat=True возвращает queryset, содержащий значения поля
clients = Client.objects.values_list('title', flat=True)
result['Список клиентов (получен с помощью values_list (flat=True))'] = list(clients)
# Пример использования функции aggregate для получения сводной информации по всей таблице клиентов сразу
agr_values = Client.objects.aggregate(
count_clients=Count('title'),
credit_limit_max=Max('credit_limit'),
credit_limit_min=Min('credit_limit'),
credit_limit_avg=Avg('credit_limit'),
credit_limit_sum=Sum('credit_limit')
)
result['Сводные данные по всем клиентам'] = agr_values
# Получаем информацию о количестве заказов, сделанных каждым клиентом
clients_with_order_counts = Client.objects.annotate(Count('order')).order_by('-order__count')
result['Количество заказов, сделанных клиентами'] = {
client.title: client.order__count for client in clients_with_order_counts
}
# По примеру формирования списка клиентов формируем и список товаров
products = Product.objects.values('title', 'balance', 'price')
result['Список товаров с остатками и ценами'] = list(products)
# Суммарная стоимость всех товаров на складе
total_cost = Product.objects.aggregate(summary_cost=Sum(F('balance')*F('price')))
result['Суммарная стоимость всех товаров на складе'] = total_cost['summary_cost']
# Количество заказов
orders_count = Order.objects.count()
result['Общее количество заказов в базе'] = orders_count
# Список заказов с суммой каждого заказа (можно ли дать имя вычисляемому полю? спросить у Жени)
orders_list = Order.objects.values_list(
'client__title',
'product__title',
'dt_create',
'count',
F('count')*F('product__price')
)
orders_list = list(orders_list)
orders_list.sort(reverse=True, key=lambda e: e[4])
result['Список заказов с суммой каждого заказа'] = orders_list
# Среднее количество товаров в заказе
avg_count_in_order = Order.objects.aggregate(Avg('count'))['count__avg']
result['Среднее количество товаров в заказе'] = avg_count_in_order
# Получаем список клиентов и для каждого подсчитываем сумму сделанных им заказов
# Клиенты, не сделавшие ни одного заказа - отсекаются (filter(sum_counts__isnull=False))
# Результаты сортируются по возрастанию суммы заказа
clients_order_cost = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False).order_by('sum_counts')
result['Список клиентов и сумма их заказов'] = [
{client.title: client.sum_counts} for client in clients_order_cost
]
# Получаем список клиентов, превысивших свой кредитный лимит
clients_over_limit = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False, sum_counts__gt=F('credit_limit'))
result['Список клиентов, превысивших свой кредитный лимит'] = [
{
'Клиент': client.title,
'Превышение лимита': (client.sum_counts-client.credit_limit)
} for client in clients_over_limit
]
# Получаем товары, которых заказано больше их наличия на складе
products_over_balance = Product.objects.annotate(
sum_counts=Sum('order__count')
).filter(sum_counts__isnull=False, sum_counts__gt=F('balance'))
result['Список товаров, которых заказано больше их количества на складе'] = [
{
'Товар': product.title,
'Превышение количества': (product.sum_counts - product.balance)
} for product in products_over_balance
]
# Пример использования класса Prefetch: выбор всех клиентов, которые заказывали видеокарты
video_pr = Prefetch(
'order_set',
queryset=Order.objects.filter(product__title__istartswith='Видеокарта'),
to_attr='video_orders'
)
clients_videocard = Client.objects.prefetch_related(video_pr).all()
result['Клиенты, заказавшие видеокарты'] = [
{
'Клиент': client.title,
'Заказы': [
order.__str__() for order in client.video_orders
]
} for client in clients_videocard if client.video_orders
]
return JsonResponse(result)
@require_GET
def spec_stat(request, stat_type):
# Получаем сумму всех кредитных лимитов клиентов
if stat_type == 'sum_limits':
sum_limits = Client.objects.aggregate(sum=Sum('credit_limit'))['sum']
result = {'Сумма кредитных лимитов клиентов': sum_limits}
# Получаем сумму кредитных лимитов VIP-клиентов
if stat_type == 'sum_limits_vip':
sum_limits_vip = Client.objects.aggregate(
sum=Sum('credit_limit', filter=Q(vip=True))
)['sum']
result = {'Сумма кредитных лимитов VIP-клиентов': sum_limits_vip}
# Получаем всех клиентов, у которых кредитный лимит выше среднего
if stat_type == 'limit_over_avg':
avg_limit = Client.objects.aggregate(avg=Avg('credit_limit'))['avg']
clients = Client.objects.filter(
credit_limit__gt=avg_limit
).values_list(
'title', flat=True
)
result = {'Клиенты, у которых кредитный лимит выше среднего': list(clients)}
# Клиенты и список их заказов
if stat_type == 'clients_and_orders':
clients = Client.objects.prefetch_related('order_set').all()
result = {}
for client in clients:
result[client.title] = list(client.order_set.all().values('product__title', 'count'))
# vip-клиенты и список их заказов, включающих видеокарты
if stat_type == 'vip_clients_and_orders_video':
pr = Prefetch('order_set', queryset=Order.objects.filter(product__title__contains='Видеокарта'))
clients = Client.objects.pref |
# Получаем списки клиентов и товаров | random_line_split |
views.py | ым клиентом
clients_with_order_counts = Client.objects.annotate(Count('order')).order_by('-order__count')
result['Количество заказов, сделанных клиентами'] = {
client.title: client.order__count for client in clients_with_order_counts
}
# По примеру формирования списка клиентов формируем и список товаров
products = Product.objects.values('title', 'balance', 'price')
result['Список товаров с остатками и ценами'] = list(products)
# Суммарная стоимость всех товаров на складе
total_cost = Product.objects.aggregate(summary_cost=Sum(F('balance')*F('price')))
result['Суммарная стоимость всех товаров на складе'] = total_cost['summary_cost']
# Количество заказов
orders_count = Order.objects.count()
result['Общее количество заказов в базе'] = orders_count
# Список заказов с суммой каждого заказа (можно ли дать имя вычисляемому полю? спросить у Жени)
orders_list = Order.objects.values_list(
'client__title',
'product__title',
'dt_create',
'count',
F('count')*F('product__price')
)
orders_list = list(orders_list)
orders_list.sort(reverse=True, key=lambda e: e[4])
result['Список заказов с суммой каждого заказа'] = orders_list
# Среднее количество товаров в заказе
avg_count_in_order = Order.objects.aggregate(Avg('count'))['count__avg']
result['Среднее количество товаров в заказе'] = avg_count_in_order
# Получаем список клиентов и для каждого подсчитываем сумму сделанных им заказов
# Клиенты, не сделавшие ни одного заказа - отсекаются (filter(sum_counts__isnull=False))
# Результаты сортируются по возрастанию суммы заказа
clients_order_cost = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False).order_by('sum_counts')
result['Список клиентов и сумма их заказов'] = [
{client.title: client.sum_counts} for client in clients_order_cost
]
# Получаем список клиентов, превысивших свой кредитный лимит
clients_over_limit = Client.objects.annotate(
sum_counts=Sum(
F('order__count')*F('order__product__price')
)
).filter(sum_counts__isnull=False, sum_counts__gt=F('credit_limit'))
result['Список клиентов, превысивших свой кредитный лимит'] = [
{
'Клиент': client.title,
'Превышение лимита': (client.sum_counts-client.credit_limit)
} for client in clients_over_limit
]
# Получаем товары, которых заказано больше их наличия на складе
products_over_balance = Product.objects.annotate(
sum_counts=Sum('order__count')
).filter(sum_counts__isnull=False, sum_counts__gt=F('balance'))
result['Список товаров, которых заказано больше их количества на складе'] = [
{
'Товар': product.title,
'Превышение количества': (product.sum_counts - product.balance)
} for product in products_over_balance
]
# Пример использования класса Prefetch: выбор всех клиентов, которые заказывали видеокарты
video_pr = Prefetch(
'order_set',
queryset=Order.objects.filter(product__title__istartswith='Видеокарта'),
to_attr='video_orders'
)
clients_videocard = Client.objects.prefetch_related(video_pr).all()
result['Клиенты, заказавшие видеокарты'] = [
{
'Клиент': client.title,
'Заказы': [
order.__str__() for order in client.video_orders
]
} for client in clients_videocard if client.video_orders
]
return JsonResponse(result)
@require_GET
def spec_stat(request, stat_type):
# Получаем сумму всех кредитных лимитов клиентов
if stat_type == 'sum_limits':
sum_limits = Client.objects.aggregate(sum=Sum('credit_limit'))['sum']
result = {'Сумма кредитных лимитов клиентов': sum_limits}
# Получаем сумму кредитных лимитов VIP-клиентов
if stat_type == 'sum_limits_vip':
sum_limits_vip = Client.objects.aggregate(
sum=Sum('credit_limit', filter=Q(vip=True))
)['sum']
result = {'Сумма кредитных лимитов VIP-клиентов': sum_limits_vip}
# Получаем всех клиентов, у которых кредитный лимит выше среднего
if stat_type == 'limit_over_avg':
avg_limit = Client.objects.aggregate(avg=Avg('credit_limit'))['avg']
clients = Client.objects.filter(
credit_limit__gt=avg_limit
).values_list(
'title', flat=True
)
result = {'Клиенты, у которых кредитный лимит выше среднего': list(clients)}
# Клиенты и список их заказов
if stat_type == 'clients_and_orders':
clients = Client.objects.prefetch_related('order_set').all()
result = {}
for client in clients:
result[client.title] = list(client.order_set.all().values('product__title', 'count'))
# vip-клиенты и список их заказов, включающих видеокарты
if stat_type == 'vip_clients_and_orders_video':
pr = Prefetch('order_set', queryset=Order.objects.filter(product__title__contains='Видеокарта'))
clients = Client.objects.prefetch_related(pr).filter(vip=True)
result = {}
for client in clients:
order_list = list(client.order_set.values_list('product__title', 'count'))
if order_list:
result[client.title] = order_list
# Полная стоимость каждого товара на складе
if stat_type == 'products_cost':
products_with_coast = Product.objects.annotate(coast=F('price')*F('balance'))
result = {'Список товаров': [
{
'Товар': p.title,
'Полная стоимость (цена*остаток)': p.coast
} for p in products_with_coast
]}
print(result)
return JsonResponse(result)
@api_view(['GET'])
def get_clients(request):
clients = Client.objects.all()
serializer = SimpleClientSerializer(clients, many=True)
return Response(serializer.data)
@api_view(['POST'])
def create_client(request):
serializer = SimpleClientSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response('Запрос принят и успешно обработан')
else:
return Response(serializer.errors)
@api_view(['GET'])
def get_detailed_clients(request):
clients = Client.objects.all()
serializer = ClientSerializer(clients, many=True)
return Response(serializer.data)
@api_view(['GET', 'POST'])
def person_demo(request):
if request.method == 'GET':
person = Person('Сергей Лебидко', 'm', 35, '[email protected]')
serializer = PersonSerializer(person)
return Response([serializer.data])
if request.method == 'POST':
person_for_update = Person(name='Вася Пупкин', gender='m', age=15, email='[email protected]')
original_data = str(person_for_update)
serializer = PersonSerializer(data=request.data, instance=person_for_update)
if serializer.is_valid():
return Response({
'Данные до обновления экземпляра': original_data,
'Валидированные данные': serializer.validated_data,
'Данные после обновления экземпляра': str(serializer.save())
})
else:
return Response(serializer.errors)
@api_view(['GET'])
def get_urls_list(request):
from .urls import urlpatterns
return Response([str(url) for url in urlpatterns])
class ProductViewSet(ModelViewSet):
queryset = Product.objects.all()
serializer_class = ProductSerializer
class ClientViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin, CreateModelMixin, DestroyModelMixin):
queryset = Client.objects.all()
serializer_class = SimpleClientSerializer
class OrderViewSet(ModelViewSet):
queryset = Order.objects.all()
serializer_class = OrderSerializer
# Класс-контроллер входа на сайт
class Login(LoginView):
template_name = 'main/login.html'
# Контроллер отображает страницу текущего пользователя
@require_GET
def show_current_user(request):
return render(request, 'main/current_user.html', {})
# Класс-контроллер выхода с сайта
class Logout(LogoutView):
next_page = reverse_lazy('show_current_user')
@api_view(['GET'])
@permission_classes([MyPermis | sion])
def test_permission(request):
return Response('Тест разрешений выполнен успешно')
@api_view(['GET'])
def clear_ord | identifier_body |
|
main.rs | (?P<ref>\w+))?:(?P<script>.+)$").unwrap();
static ref GIT_SOURCE_REGEX: Regex =
Regex::new(r"^(?P<repo>((git|ssh|http(s)?)|(git@[\w\.]+))(:(//)?)([\w\./\-~]+)(\.git)?(/)?)(@(?P<ref>\w+))?:(?P<script>.+)$")
.unwrap();
}
#[derive(Clap, Debug)]
#[clap(author, about, version)]
#[clap(global_setting = AppSettings::ColoredHelp)]
#[clap(setting = AppSettings::DeriveDisplayOrder)]
#[clap(setting = AppSettings::SubcommandRequiredElseHelp)]
struct Opts {
#[clap(subcommand)]
command: Command,
}
const SCRIPT_HELP: &'static str = r"Script identifier for a script from a repository
For saved repos: `<repo>[@ref]:<script_path>`
Example: `myscripts:hello.bash`
Example (w/ ref): `[email protected]:hello.bash`
For git repos: `git@<repo_url>[@ref]:<script_path>`
Example: `[email protected]:user/myscripts:hello.bash`
Example (w/ ref): `[email protected]:user/myscripts@main:hello.bash`
";
#[derive(Clap, Debug)]
enum Command {
/// Read and modify locally saved repositories
Repo {
#[clap(subcommand)]
command: RepoCommand,
},
/// Run a script using the locally installed bash shell
Run {
/// Force a fresh download of the script (only for raw git repositories)
#[clap(short, long)]
fresh: bool,
#[clap(about = "Script to run", long_about = SCRIPT_HELP)]
script: String,
/// Args to be passed to the script
#[clap(about = "Args to be passed to the script")]
args: Vec<String>,
},
/// Import a script and print it to stdout
Import {
#[clap(short, long)]
fresh: bool,
#[clap(about = "Script to import", long_about = SCRIPT_HELP)]
script: String,
},
}
#[derive(Clap, Debug)]
enum RepoCommand {
/// List all locally saved repositories
#[clap(alias = "ls")]
List,
/// Add a repository to the local repository list
Add {
/// Local alias for the repository to add
name: String,
/// URI of the repository to add
uri: String,
/// Username for the repository (if required)
#[clap(long, short)]
username: Option<String>,
/// Password or token for the repository (if required)
#[clap(long, short)]
password: Option<String>,
/// Reads the password from the given environment variable when the repo is used
#[clap(long)]
password_env: Option<String>,
/// Reads the password or token from stdin
#[clap(long)]
password_stdin: bool,
},
/// Remove a repository from the local repository list
#[clap(alias = "rm")]
Remove {
/// Local alias for the repository to remove
name: String,
},
}
#[derive(PartialEq)]
pub enum Password {
Saved(String),
FromEnv(String, String),
None,
}
#[tokio::main]
async fn main() -> Result<()> {
openssl_probe::init_ssl_cert_env_vars();
let mut config = config::load_config().await?;
match Opts::parse().command {
Command::Repo { command } => match command {
RepoCommand::List => {
if config.repo.is_empty() {
println!("No Saved repositories.");
return Ok(());
}
println!("Saved repositories:");
for (k, v) in config.repo {
println!(" {} ({} | {})", k, v.provider(), v.readable());
}
}
RepoCommand::Add {
name,
uri,
username,
password,
password_env,
password_stdin,
} => {
if config.repo.contains_key(&name) {
bail!("A repository with the name `{}` already exists", &name);
}
let password_for_parse = match (password, password_env, password_stdin) {
(Some(pass), _, _) => Password::Saved(pass),
(_, Some(var), _) => Password::FromEnv(var.clone(), env::var(var)?),
(_, _, true) => {
let mut buf = String::new();
io::stdin().read_to_string(&mut buf)?;
Password::Saved(buf)
}
_ => Password::None,
};
let repo = validate_api_repo(&uri, username, password_for_parse).await?;
config.repo.insert(name.clone(), repo);
save_config(&config)
.await
.context("Failed to save updated config")?;
println!("Repo `{}` was successfully added", &name);
}
RepoCommand::Remove { name } => {
if !config.repo.contains_key(&name) {
bail!("Repo `{}` was not found", &name);
}
config.repo.remove(&name);
save_config(&config)
.await
.context("Failed to save updated config")?;
println!("Repo `{}` was removed", &name);
}
},
Command::Run {
script,
args,
fresh,
} => {
let src = ScriptSource::parse(&script, ScriptAction::Run)?;
src.validate_script_name(&config)?;
let contents = src.fetch_script_contents(&config, fresh).await?;
let args = args.iter().map(|s| &**s).collect();
// TODO(happens): Find a way to propagate the actual exit code
// instead of simply returning 0/1 depending on the script.
// This should cover most use cases if you just want to know
// if the script failed, but until `std::process::Termination`
// is stabilized, it seems unsafe to use `std::process::exit`
// since we're using a tokio main.
let exit = repo::run_script(&contents, args).await?;
if !exit.success() {
bail!("");
}
}
Command::Import { script, fresh } => {
let src = ScriptSource::parse(&script, ScriptAction::Import)?;
src.validate_script_name(&config)?;
let contents = src.fetch_script_contents(&config, fresh).await?;
repo::import_script(&contents).await?;
}
};
Ok(())
}
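// Editor's note: a minimal sketch (not part of the original code) of one way the TODO above
// about propagating the script's real exit code could be approached once the tokio runtime
// has shut down cleanly. The helper name and the fallback value of 1 are assumptions, not
// the project's actual API.
#[allow(dead_code)]
fn exit_code_from_status(status: std::process::ExitStatus) -> i32 {
    // `code()` is None when the child was terminated by a signal (on Unix); fall back to 1.
    status.code().unwrap_or(1)
}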
enum ScriptAction {
Run,
Import,
}
pub struct ScriptSource {
repo: String,
source_type: SourceType,
script_name: String,
rref: Option<String>,
action: ScriptAction,
}
enum | {
Git,
Saved,
}
impl ScriptSource {
fn parse(script: &str, action: ScriptAction) -> Result<ScriptSource> {
if let Some(matches) = API_SOURCE_REGEX.captures(script) {
let repo = matches
.name("alias")
.expect("No alias matched")
.as_str()
.to_owned();
let script_name = matches
.name("script")
.expect("No script name matched")
.as_str()
.to_owned();
let rref = matches.name("ref").map(|rref| rref.as_str().to_owned());
return Ok(Self {
source_type: SourceType::Saved,
repo,
script_name,
rref,
action,
});
}
if let Some(matches) = GIT_SOURCE_REGEX.captures(script) {
let repo = matches
.name("repo")
.expect("No repo matched")
.as_str()
.to_owned();
let script_name = matches
.name("script")
.expect("No script name matched")
.as_str()
.to_owned();
let rref = matches.name("ref").map(|rref| rref.as_str().to_owned());
return Ok(Self {
source_type: SourceType::Git,
repo,
script_name,
rref,
action,
});
}
bail!("Script source could not be parsed")
}
fn validate_script_name(&self, config: &Config) -> Result<()> {
if config.require_bash_extension.is_none() && config.require_lib_extension.is_none() {
return Ok(());
}
let expected = match (
&config.require_bash_extension,
&config.require_lib_extension,
&self.action,
) {
(Some(ref ext), _, &ScriptAction::Run) => ext,
(_, Some(ext), &ScriptAction::Import) => ext,
_ => unreachable!(),
};
if !self.script_name.ends_with(expected) {
bail!("Expected script name to end with `{}`", expected);
}
Ok(())
}
async fn fetch_script_contents(&self, config: &config::Config, fresh: bool) -> Result<String> {
let repo = match self.source_type {
SourceType::Saved => config
.repo
.get(&self.repo)
.ok_or(anyhow!("Repo `{}` was not found", &self.repo))?
.box_clone(),
SourceType::Git => git::GitRepo::from_src(&self),
};
let rref = self.rref.clone().unwrap_or("HEAD".to_owned());
Ok(repo.fetch_script(&self.script_name, &rref, fresh).await | SourceType | identifier_name |
main.rs | (?P<ref>\w+))?:(?P<script>.+)$").unwrap();
static ref GIT_SOURCE_REGEX: Regex =
Regex::new(r"^(?P<repo>((git|ssh|http(s)?)|(git@[\w\.]+))(:(//)?)([\w\./\-~]+)(\.git)?(/)?)(@(?P<ref>\w+))?:(?P<script>.+)$")
.unwrap();
}
#[derive(Clap, Debug)]
#[clap(author, about, version)]
#[clap(global_setting = AppSettings::ColoredHelp)]
#[clap(setting = AppSettings::DeriveDisplayOrder)]
#[clap(setting = AppSettings::SubcommandRequiredElseHelp)]
struct Opts {
#[clap(subcommand)]
command: Command,
}
const SCRIPT_HELP: &'static str = r"Script identifier for a script from a repository
For saved repos: `<repo>[@ref]:<script_path>`
Example: `myscripts:hello.bash`
Example (w/ ref): `[email protected]:hello.bash`
For git repos: `git@<repo_url>[@ref]:<script_path>`
Example: `[email protected]:user/myscripts:hello.bash`
Example (w/ ref): `[email protected]:user/myscripts@main:hello.bash`
";
#[derive(Clap, Debug)]
enum Command {
/// Read and modify locally saved repositories
Repo {
#[clap(subcommand)]
command: RepoCommand,
},
/// Run a script using the locally installed bash shell
Run {
/// Force a fresh download of the script (only for raw git repositories)
#[clap(short, long)]
fresh: bool,
#[clap(about = "Script to run", long_about = SCRIPT_HELP)]
script: String,
/// Args to be passed to the script
#[clap(about = "Args to be passed to the script")]
args: Vec<String>,
},
/// Import a script and print it to stdout
Import {
#[clap(short, long)]
fresh: bool,
#[clap(about = "Script to import", long_about = SCRIPT_HELP)]
script: String,
},
}
#[derive(Clap, Debug)]
enum RepoCommand {
/// List all locally saved repositories
#[clap(alias = "ls")]
List,
/// Add a repository to the local repository list
Add {
/// Local alias for the repository to add
name: String,
/// URI of the repository to add
uri: String,
/// Username for the repository (if required)
#[clap(long, short)]
username: Option<String>,
/// Password or token for the repository (if required)
#[clap(long, short)]
password: Option<String>,
/// Reads the password from the given environment variable when the repo is used
#[clap(long)]
password_env: Option<String>,
/// Reads the password or token from stdin
#[clap(long)]
password_stdin: bool,
},
/// Remove a repository from the local repository list
#[clap(alias = "rm")]
Remove {
/// Local alias for the repository to remove
name: String,
},
}
#[derive(PartialEq)]
pub enum Password {
Saved(String),
FromEnv(String, String),
None,
}
#[tokio::main]
async fn main() -> Result<()> {
openssl_probe::init_ssl_cert_env_vars();
let mut config = config::load_config().await?;
match Opts::parse().command {
Command::Repo { command } => match command {
RepoCommand::List => {
if config.repo.is_empty() {
println!("No Saved repositories.");
return Ok(());
}
println!("Saved repositories:");
for (k, v) in config.repo {
println!(" {} ({} | {})", k, v.provider(), v.readable());
}
}
RepoCommand::Add {
name,
uri,
username,
password,
password_env,
password_stdin,
} => {
if config.repo.contains_key(&name) {
bail!("A repository with the name `{}` already exists", &name);
}
let password_for_parse = match (password, password_env, password_stdin) {
(Some(pass), _, _) => Password::Saved(pass),
(_, Some(var), _) => Password::FromEnv(var.clone(), env::var(var)?),
(_, _, true) => {
let mut buf = String::new();
io::stdin().read_to_string(&mut buf)?;
Password::Saved(buf)
}
_ => Password::None,
};
let repo = validate_api_repo(&uri, username, password_for_parse).await?;
config.repo.insert(name.clone(), repo);
save_config(&config)
.await
.context("Failed to save updated config")?;
println!("Repo `{}` was successfully added", &name);
}
RepoCommand::Remove { name } => {
if !config.repo.contains_key(&name) {
bail!("Repo `{}` was not found", &name);
}
config.repo.remove(&name);
save_config(&config)
.await
.context("Failed to save updated config")?;
println!("Repo `{}` was removed", &name);
}
},
Command::Run {
script,
args,
fresh,
} => {
let src = ScriptSource::parse(&script, ScriptAction::Run)?;
src.validate_script_name(&config)?;
let contents = src.fetch_script_contents(&config, fresh).await?;
let args = args.iter().map(|s| &**s).collect();
// TODO(happens): Find a way to propagate the actual exit code
// instead of simply returning 0/1 depending on the script.
// This should cover most use cases if you just want to know
// if the script failed, but until `std::process::Termination`
// is stabilized, it seems unsafe to use `std::process::exit`
// since we're using a tokio main.
let exit = repo::run_script(&contents, args).await?;
if !exit.success() {
bail!("");
}
}
Command::Import { script, fresh } => {
let src = ScriptSource::parse(&script, ScriptAction::Import)?;
src.validate_script_name(&config)?;
let contents = src.fetch_script_contents(&config, fresh).await?;
repo::import_script(&contents).await?;
}
};
Ok(())
}
enum ScriptAction {
Run,
Import,
}
pub struct ScriptSource {
repo: String,
source_type: SourceType,
script_name: String,
rref: Option<String>,
action: ScriptAction,
}
enum SourceType {
Git,
Saved,
}
impl ScriptSource {
fn parse(script: &str, action: ScriptAction) -> Result<ScriptSource> {
if let Some(matches) = API_SOURCE_REGEX.captures(script) {
let repo = matches
.name("alias")
.expect("No alias matched")
.as_str()
.to_owned();
let script_name = matches
.name("script")
.expect("No script name matched")
.as_str()
.to_owned();
let rref = matches.name("ref").map(|rref| rref.as_str().to_owned());
return Ok(Self {
source_type: SourceType::Saved,
repo,
script_name,
rref,
action,
});
}
if let Some(matches) = GIT_SOURCE_REGEX.captures(script) {
let repo = matches
.name("repo")
.expect("No repo matched")
.as_str()
.to_owned();
let script_name = matches
.name("script")
.expect("No script name matched")
.as_str()
.to_owned();
let rref = matches.name("ref").map(|rref| rref.as_str().to_owned());
return Ok(Self {
source_type: SourceType::Git,
repo,
script_name,
rref,
action,
});
}
bail!("Script source could not be parsed")
}
fn validate_script_name(&self, config: &Config) -> Result<()> {
if config.require_bash_extension.is_none() && config.require_lib_extension.is_none() {
return Ok(());
}
let expected = match (
&config.require_bash_extension,
&config.require_lib_extension,
&self.action,
) {
(Some(ref ext), _, &ScriptAction::Run) => ext,
(_, Some(ext), &ScriptAction::Import) => ext,
_ => unreachable!(),
};
if !self.script_name.ends_with(expected) {
bail!("Expected script name to end with `{}`", expected);
}
Ok(())
}
async fn fetch_script_contents(&self, config: &config::Config, fresh: bool) -> Result<String> {
let repo = match self.source_type {
SourceType::Saved => config
.repo | SourceType::Git => git::GitRepo::from_src(&self),
};
let rref = self.rref.clone().unwrap_or("HEAD".to_owned());
Ok(repo.fetch_script(&self.script_name, &rref, fresh).await?)
| .get(&self.repo)
.ok_or(anyhow!("Repo `{}` was not found", &self.repo))?
.box_clone(), | random_line_split |
main.rs | (?P<ref>\w+))?:(?P<script>.+)$").unwrap();
static ref GIT_SOURCE_REGEX: Regex =
Regex::new(r"^(?P<repo>((git|ssh|http(s)?)|(git@[\w\.]+))(:(//)?)([\w\./\-~]+)(\.git)?(/)?)(@(?P<ref>\w+))?:(?P<script>.+)$")
.unwrap();
}
#[derive(Clap, Debug)]
#[clap(author, about, version)]
#[clap(global_setting = AppSettings::ColoredHelp)]
#[clap(setting = AppSettings::DeriveDisplayOrder)]
#[clap(setting = AppSettings::SubcommandRequiredElseHelp)]
struct Opts {
#[clap(subcommand)]
command: Command,
}
const SCRIPT_HELP: &'static str = r"Script identifier for a script from a repository
For saved repos: `<repo>[@ref]:<script_path>`
Example: `myscripts:hello.bash`
Example (w/ ref): `[email protected]:hello.bash`
For git repos: `git@<repo_url>[@ref]:<script_path>`
Example: `[email protected]:user/myscripts:hello.bash`
Example (w/ ref): `[email protected]:user/myscripts@main:hello.bash`
";
#[derive(Clap, Debug)]
enum Command {
/// Read and modify locally saved repositories
Repo {
#[clap(subcommand)]
command: RepoCommand,
},
/// Run a script using the locally installed bash shell
Run {
/// Force a fresh download of the script (only for raw git repositories)
#[clap(short, long)]
fresh: bool,
#[clap(about = "Script to run", long_about = SCRIPT_HELP)]
script: String,
/// Args to be passed to the script
#[clap(about = "Args to be passed to the script")]
args: Vec<String>,
},
/// Import a script and print it to stdout
Import {
#[clap(short, long)]
fresh: bool,
#[clap(about = "Script to import", long_about = SCRIPT_HELP)]
script: String,
},
}
#[derive(Clap, Debug)]
enum RepoCommand {
/// List all locally saved repositories
#[clap(alias = "ls")]
List,
/// Add a repository to the local repository list
Add {
/// Local alias for the repository to add
name: String,
/// URI of the repository to add
uri: String,
/// Username for the repository (if required)
#[clap(long, short)]
username: Option<String>,
/// Password or token for the repository (if required)
#[clap(long, short)]
password: Option<String>,
/// Reads the password from the given environment variable when the repo is used
#[clap(long)]
password_env: Option<String>,
/// Reads the password or token from stdin
#[clap(long)]
password_stdin: bool,
},
/// Remove a repository from the local repository list
#[clap(alias = "rm")]
Remove {
/// Local alias for the repository to remove
name: String,
},
}
#[derive(PartialEq)]
pub enum Password {
Saved(String),
FromEnv(String, String),
None,
}
#[tokio::main]
async fn main() -> Result<()> {
openssl_probe::init_ssl_cert_env_vars();
let mut config = config::load_config().await?;
match Opts::parse().command {
Command::Repo { command } => match command {
RepoCommand::List => {
if config.repo.is_empty() {
println!("No Saved repositories.");
return Ok(());
}
println!("Saved repositories:");
for (k, v) in config.repo {
println!(" {} ({} | {})", k, v.provider(), v.readable());
}
}
RepoCommand::Add {
name,
uri,
username,
password,
password_env,
password_stdin,
} => | .context("Failed to save updated config")?;
println!("Repo `{}` was successfully added", &name);
}
RepoCommand::Remove { name } => {
if !config.repo.contains_key(&name) {
bail!("Repo `{}` was not found", &name);
}
config.repo.remove(&name);
save_config(&config)
.await
.context("Failed to save updated config")?;
println!("Repo `{}` was removed", &name);
}
},
Command::Run {
script,
args,
fresh,
} => {
let src = ScriptSource::parse(&script, ScriptAction::Run)?;
src.validate_script_name(&config)?;
let contents = src.fetch_script_contents(&config, fresh).await?;
let args = args.iter().map(|s| &**s).collect();
// TODO(happens): Find a way to propagate the actual exit code
// instead of simply returning 0/1 depending on the script.
// This should cover most use cases if you just want to know
// if the script failed, but until `std::process::Termination`
// is stabilized, it seems unsafe to use `std::process::exit`
// since we're using a tokio main.
let exit = repo::run_script(&contents, args).await?;
if !exit.success() {
bail!("");
}
}
Command::Import { script, fresh } => {
let src = ScriptSource::parse(&script, ScriptAction::Import)?;
src.validate_script_name(&config)?;
let contents = src.fetch_script_contents(&config, fresh).await?;
repo::import_script(&contents).await?;
}
};
Ok(())
}
enum ScriptAction {
Run,
Import,
}
pub struct ScriptSource {
repo: String,
source_type: SourceType,
script_name: String,
rref: Option<String>,
action: ScriptAction,
}
enum SourceType {
Git,
Saved,
}
impl ScriptSource {
fn parse(script: &str, action: ScriptAction) -> Result<ScriptSource> {
if let Some(matches) = API_SOURCE_REGEX.captures(script) {
let repo = matches
.name("alias")
.expect("No alias matched")
.as_str()
.to_owned();
let script_name = matches
.name("script")
.expect("No script name matched")
.as_str()
.to_owned();
let rref = matches.name("ref").map(|rref| rref.as_str().to_owned());
return Ok(Self {
source_type: SourceType::Saved,
repo,
script_name,
rref,
action,
});
}
if let Some(matches) = GIT_SOURCE_REGEX.captures(script) {
let repo = matches
.name("repo")
.expect("No repo matched")
.as_str()
.to_owned();
let script_name = matches
.name("script")
.expect("No script name matched")
.as_str()
.to_owned();
let rref = matches.name("ref").map(|rref| rref.as_str().to_owned());
return Ok(Self {
source_type: SourceType::Git,
repo,
script_name,
rref,
action,
});
}
bail!("Script source could not be parsed")
}
fn validate_script_name(&self, config: &Config) -> Result<()> {
if config.require_bash_extension.is_none() && config.require_lib_extension.is_none() {
return Ok(());
}
let expected = match (
&config.require_bash_extension,
&config.require_lib_extension,
&self.action,
) {
(Some(ref ext), _, &ScriptAction::Run) => ext,
(_, Some(ext), &ScriptAction::Import) => ext,
_ => unreachable!(),
};
if !self.script_name.ends_with(expected) {
bail!("Expected script name to end with `{}`", expected);
}
Ok(())
}
async fn fetch_script_contents(&self, config: &config::Config, fresh: bool) -> Result<String> {
let repo = match self.source_type {
SourceType::Saved => config
.repo
.get(&self.repo)
.ok_or(anyhow!("Repo `{}` was not found", &self.repo))?
.box_clone(),
SourceType::Git => git::GitRepo::from_src(&self),
};
let rref = self.rref.clone().unwrap_or("HEAD".to_owned());
Ok(repo.fetch_script(&self.script_name, &rref, fresh).await | {
if config.repo.contains_key(&name) {
bail!("A repository with the name `{}` already exists", &name);
}
let password_for_parse = match (password, password_env, password_stdin) {
(Some(pass), _, _) => Password::Saved(pass),
(_, Some(var), _) => Password::FromEnv(var.clone(), env::var(var)?),
(_, _, true) => {
let mut buf = String::new();
io::stdin().read_to_string(&mut buf)?;
Password::Saved(buf)
}
_ => Password::None,
};
let repo = validate_api_repo(&uri, username, password_for_parse).await?;
config.repo.insert(name.clone(), repo);
save_config(&config)
.await | conditional_block |
ledger_manager.rs | 256>,
pub tx_confirmed: HashSet<H256>,
pub tx_count: usize,
}
//ledger-manager will periodically loop and confirm the transactions
pub struct LedgerManager {
pub ledger_manager_state: LedgerManagerState,
pub blockchain: Arc<Mutex<Blockchain>>,
pub utxo_state: Arc<Mutex<UtxoState>>,
pub voter_depth_k: u32,
}
impl LedgerManager {
pub fn new(blockchain: &Arc<Mutex<Blockchain>>, utxo_state: &Arc<Mutex<UtxoState>>, k: u32) -> Self {
let ledger_manager_state = LedgerManagerState{
last_level_processed: 1,
proposer_blocks_processed: HashSet::new(),
leader_sequence: Vec::new(),
tx_confirmed: HashSet::new(),
tx_count: 0,
};
LedgerManager {
ledger_manager_state: ledger_manager_state,
blockchain: Arc::clone(blockchain),
utxo_state: Arc::clone(utxo_state),
voter_depth_k: k,
}
}
pub fn start(mut self) {
thread::Builder::new()
.name("ledger_manager".to_string())
.spawn(move || {
self.ledger_manager_loop();
})
.unwrap();
}
//Three Steps
//1. Get the leader sequence
//2. Get Transaction sequence
//3. Sanitize Tx and update UTXO state
//All 3 steps are done in the loop
//
fn ledger_manager_loop(&mut self) {
loop{
//Step 1
//let leader_sequence = self.get_leader_sequence();
//This one uses the algorithm described in Prism Paper
let leader_sequence = self.get_confirmed_leader_sequence();
//Step 2
let tx_sequence = self.get_transaction_sequence(&leader_sequence);
//Step 3
self.confirm_transactions(&tx_sequence);
thread::sleep(Duration::from_secs(1));
}
}
fn get_leader_sequence(&mut self) -> Vec<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut leader_sequence: Vec<H256> = vec![];
//TODO: This is a workaround for now till we have some DS which asserts that
//all voter chains at a particular level have voted
// level2votes: how many votes have been cast at level i
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
for level in level_start..level_end {
let proposers = &locked_blockchain.level2allproposers[&level];
let mut max_vote_count = 0;
let mut leader: H256 = [0; 32].into();
//Assumption: if a vote for proposer not present, assumed to be 0
//When above TODO is done, then this will not be needed or modified accordingly
for proposer in proposers {
if locked_blockchain.proposer2votecount.contains_key(proposer) {
let vote_count = locked_blockchain.proposer2votecount[proposer];
if vote_count > max_vote_count {
max_vote_count = vote_count;
leader = *proposer;
}
}
}
//break out as there is no point going forward as no leader found at this level
if max_vote_count == 0 {
break;
}
println!("Adding leader at level {}, leader hash: {:?}, max votes: {}", level, leader, max_vote_count);
leader_sequence.push(leader);
self.ledger_manager_state.leader_sequence.push(leader);
println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
leader_sequence
}
fn get_confirmed_leader_sequence(&mut self) -> Vec<H256> {
let mut leader_sequence: Vec<H256> = vec![];
//Locking Blockchain to get proposer_depth currently. Then dropping the lock
//Will be holding the lock for each level processed inside the subroutine
let locked_blockchain = self.blockchain.lock().unwrap();
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
drop(locked_blockchain);
for level in level_start..level_end {
let leader: Option<H256> = self.confirm_leader(level);
match leader {
Some(leader_hash) => {
println!("Adding leader at level {}, leader hash: {:?}", level, leader_hash);
leader_sequence.push(leader_hash);
// self.ledger_manager_state.leader_sequence.push(leader_hash);
// println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
None => {
println!("Unable to confirm leader at level {}", level);
println!("Returning from get_confirmed_leader_sequence func");
break; // TODO: Will this break out of loop??
}
}
}
leader_sequence
}
//we use the confirmation policy from https://arxiv.org/abs/1810.08092
//This function is heavily borrowed from implementation provided in the actual Prism codebase
//https://github.com/yangl1996/prism-rust/
fn confirm_leader(&mut self, level: u32) -> Option<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let proposer_blocks = &locked_blockchain.level2allproposers[&level];
let mut new_leader: Option<H256> = None;
let num_voter_chains: u32 = locked_blockchain.num_voter_chains;
// for each proposer count the number of confirmed votes i.e. votes that are k-deep (2-deep).
let mut num_confirmed_votes: HashMap<H256, u32> = HashMap::new();
for block in proposer_blocks {
if locked_blockchain.proposer2voterinfo.contains_key(block) | }
num_confirmed_votes.insert(*block, total_k_deep_votes);
}
}
for (proposer, votes) in num_confirmed_votes.iter() {
println!("proposer {:?} votes {}", proposer, *votes);
if *votes > (num_voter_chains / 2) {
new_leader = Some(*proposer);
break;
}
}
new_leader
}
// needs to process parent as well
fn get_transaction_sequence(&mut self, leader_sequence: &Vec<H256>) -> Vec<SignedTransaction> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut tx_sequence: Vec<SignedTransaction> = Vec::new();
//TODO: Should we do it recursively? Like should we also see references to
//proposer references of leader?
//TODO: Also we should refactor it later
for leader in leader_sequence {
let leader_block = &locked_blockchain.proposer_chain[leader].block;
//processing parent and proposer refs
let mut proposer_refs_to_process: Vec<H256> = Vec::new();
let mut leader_txs: Vec<SignedTransaction> = Vec::new();
match &leader_block.content {
Content::Proposer(content) => {
// parent and proposer_refs of leader
let parent = &content.parent_hash;
let proposer_refs = &content.proposer_refs;
if !self.ledger_manager_state.proposer_blocks_processed.contains(parent) {
proposer_refs_to_process.push(*parent);
}
for proposer_ref in proposer_refs {
if !self.ledger_manager_state.proposer_blocks_processed.contains(proposer_ref) {
proposer_refs_to_process.push(*proposer_ref);
}
}
//txs of leader
leader_txs = content.transactions.clone();
}
_ => {
}
}
//TODO: Do we have to do match in this and previous loop as we know it will always
//match to Proposer(content). Can we unwrap??
for proposer_ref in &proposer_refs_to_process {
let proposer_block = &locked_blockchain.prop | {
//TODO: We might also need number of voter blocks at a particular level of a voter chain
//This is not urgent as we can **assume**, there is one block at each level
let voters_info = &locked_blockchain.proposer2voterinfo[block];
if voters_info.len() < (num_voter_chains as usize / 2) {
println!("number of votes for {:?} is {}", block, voters_info.len());
continue;
}
let mut total_k_deep_votes: u32 = 0;
for (voter_chain, voter_block) in voters_info {
let voter_block_level = locked_blockchain.voter_chains[(*voter_chain-1) as usize][voter_block].level;
let voter_chain_level = locked_blockchain.voter_depths[(*voter_chain-1) as usize];
let this_vote_depth = voter_chain_level - voter_block_level;
if this_vote_depth >= self.voter_depth_k {
total_k_deep_votes += 1;
} | conditional_block |
ledger_manager.rs | {
pub last_level_processed: u32,
pub leader_sequence: Vec<H256>,
pub proposer_blocks_processed: HashSet<H256>,
pub tx_confirmed: HashSet<H256>,
pub tx_count: usize,
}
//ledger-manager will periodically loop and confirm the transactions
pub struct LedgerManager {
pub ledger_manager_state: LedgerManagerState,
pub blockchain: Arc<Mutex<Blockchain>>,
pub utxo_state: Arc<Mutex<UtxoState>>,
pub voter_depth_k: u32,
}
impl LedgerManager {
pub fn new(blockchain: &Arc<Mutex<Blockchain>>, utxo_state: &Arc<Mutex<UtxoState>>, k: u32) -> Self {
let ledger_manager_state = LedgerManagerState{
last_level_processed: 1,
proposer_blocks_processed: HashSet::new(),
leader_sequence: Vec::new(),
tx_confirmed: HashSet::new(),
tx_count: 0,
};
LedgerManager {
ledger_manager_state: ledger_manager_state,
blockchain: Arc::clone(blockchain),
utxo_state: Arc::clone(utxo_state),
voter_depth_k: k,
}
}
pub fn start(mut self) {
thread::Builder::new()
.name("ledger_manager".to_string())
.spawn(move || {
self.ledger_manager_loop();
})
.unwrap();
}
//Three Steps
//1. Get the leader sequence
//2. Get Transaction sequence
//3. Sanitize Tx and update UTXO state
//All 3 steps are done in the loop
//
fn ledger_manager_loop(&mut self) {
loop{
//Step 1
//let leader_sequence = self.get_leader_sequence();
//This one uses the algorithm described in Prism Paper
let leader_sequence = self.get_confirmed_leader_sequence();
//Step 2
let tx_sequence = self.get_transaction_sequence(&leader_sequence);
//Step 3
self.confirm_transactions(&tx_sequence);
thread::sleep(Duration::from_secs(1));
}
}
fn get_leader_sequence(&mut self) -> Vec<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut leader_sequence: Vec<H256> = vec![];
//TODO: This is a workaround for now till we have some DS which asserts that
//all voter chains at a particular level have voted
// level2votes: how many votes have been cast at level i
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
for level in level_start..level_end {
let proposers = &locked_blockchain.level2allproposers[&level];
let mut max_vote_count = 0;
let mut leader: H256 = [0; 32].into();
//Assumption: if a vote for proposer not present, assumed to be 0
//When above TODO is done, then this will not be needed or modified accordingly
for proposer in proposers {
if locked_blockchain.proposer2votecount.contains_key(proposer) {
let vote_count = locked_blockchain.proposer2votecount[proposer];
if vote_count > max_vote_count {
max_vote_count = vote_count;
leader = *proposer;
}
}
}
//break out as there is no point going forward as no leader found at this level
if max_vote_count == 0 {
break;
}
println!("Adding leader at level {}, leader hash: {:?}, max votes: {}", level, leader, max_vote_count);
leader_sequence.push(leader);
self.ledger_manager_state.leader_sequence.push(leader);
println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
leader_sequence
}
fn get_confirmed_leader_sequence(&mut self) -> Vec<H256> {
let mut leader_sequence: Vec<H256> = vec![];
//Locking Blockchain to get proposer_depth currently. Then dropping the lock
//Will be holding the lock for each level processed inside the subroutine
let locked_blockchain = self.blockchain.lock().unwrap();
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
drop(locked_blockchain);
for level in level_start..level_end {
let leader: Option<H256> = self.confirm_leader(level);
match leader {
Some(leader_hash) => {
println!("Adding leader at level {}, leader hash: {:?}", level, leader_hash);
leader_sequence.push(leader_hash);
// self.ledger_manager_state.leader_sequence.push(leader_hash);
// println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
None => {
println!("Unable to confirm leader at level {}", level);
println!("Returning from get_confirmed_leader_sequence func");
break; // TODO: Will this break out of loop??
}
}
}
leader_sequence
}
//we use the confirmation policy from https://arxiv.org/abs/1810.08092
//This function is heavily borrowed from implementation provided in the actual Prism codebase
//https://github.com/yangl1996/prism-rust/
fn confirm_leader(&mut self, level: u32) -> Option<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let proposer_blocks = &locked_blockchain.level2allproposers[&level];
let mut new_leader: Option<H256> = None;
let num_voter_chains: u32 = locked_blockchain.num_voter_chains;
// for each proposer count the number of confirmed votes i.e. votes that are k-deep (2-deep).
let mut num_confirmed_votes: HashMap<H256, u32> = HashMap::new();
for block in proposer_blocks {
if locked_blockchain.proposer2voterinfo.contains_key(block) {
//TODO: We might also need number of voter blocks at a particular level of a voter chain
//This is not urgent as we can **assume**, there is one block at each level
let voters_info = &locked_blockchain.proposer2voterinfo[block];
if voters_info.len() < (num_voter_chains as usize / 2) {
println!("number of votes for {:?} is {}", block, voters_info.len());
continue;
}
let mut total_k_deep_votes: u32 = 0;
for (voter_chain, voter_block) in voters_info {
let voter_block_level = locked_blockchain.voter_chains[(*voter_chain-1) as usize][voter_block].level;
let voter_chain_level = locked_blockchain.voter_depths[(*voter_chain-1) as usize];
let this_vote_depth = voter_chain_level - voter_block_level;
if this_vote_depth >= self.voter_depth_k {
total_k_deep_votes += 1;
}
}
num_confirmed_votes.insert(*block, total_k_deep_votes);
}
}
for (proposer, votes) in num_confirmed_votes.iter() {
println!("proposer {:?} votes {}", proposer, *votes);
if *votes > (num_voter_chains / 2) {
new_leader = Some(*proposer);
break;
}
}
new_leader
}
// needs to process parent as well
fn get_transaction_sequence(&mut self, leader_sequence: &Vec<H256>) -> Vec<SignedTransaction> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut tx_sequence: Vec<SignedTransaction> = Vec::new();
//TODO: Should we do it recursively? Like should we also see references to
//proposer references of leader?
//TODO: Also we should refactor it later
for leader in leader_sequence {
let leader_block = &locked_blockchain.proposer_chain[leader].block;
//processing parent and proposer refs
let mut proposer_refs_to_process: Vec<H256> = Vec::new();
let mut leader_txs: Vec<SignedTransaction> = Vec::new();
match &leader_block.content {
Content::Proposer(content) => {
// parent and proposer_refs of leader
let parent = &content.parent_hash;
let proposer_refs = &content.proposer_refs;
if !self.ledger_manager_state.proposer_blocks_processed.contains(parent) {
proposer_refs_to_process.push(*parent);
}
for proposer_ref in proposer_refs {
if !self.ledger_manager_state.proposer_blocks_processed.contains(proposer_ref) {
proposer_refs_to_process.push(*proposer_ref);
}
}
//txs of leader
leader_txs = content.transactions.clone();
}
_ => {
}
}
//TODO: Do we have to do match in this and previous loop as we know it will always
//match | LedgerManagerState | identifier_name |
|
ledger_manager.rs | 256>,
pub tx_confirmed: HashSet<H256>,
pub tx_count: usize,
}
//ledger-manager will periodically loop and confirm the transactions
pub struct LedgerManager {
pub ledger_manager_state: LedgerManagerState,
pub blockchain: Arc<Mutex<Blockchain>>,
pub utxo_state: Arc<Mutex<UtxoState>>,
pub voter_depth_k: u32,
}
impl LedgerManager {
pub fn new(blockchain: &Arc<Mutex<Blockchain>>, utxo_state: &Arc<Mutex<UtxoState>>, k: u32) -> Self {
let ledger_manager_state = LedgerManagerState{
last_level_processed: 1,
proposer_blocks_processed: HashSet::new(),
leader_sequence: Vec::new(),
tx_confirmed: HashSet::new(),
tx_count: 0,
};
LedgerManager {
ledger_manager_state: ledger_manager_state,
blockchain: Arc::clone(blockchain),
utxo_state: Arc::clone(utxo_state),
voter_depth_k: k,
}
}
pub fn start(mut self) {
thread::Builder::new()
.name("ledger_manager".to_string())
.spawn(move || {
self.ledger_manager_loop();
})
.unwrap();
}
//Three Steps
//1. Get the leader sequence
//2. Get Transaction sequence
//3. Sanitize Tx and update UTXO state
//All 3 steps are done in the loop
//
fn ledger_manager_loop(&mut self) |
fn get_leader_sequence(&mut self) -> Vec<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut leader_sequence: Vec<H256> = vec![];
//TODO: This is a workaround for now till we have some DS which asserts that
//all voter chains at a particular level have voted
// level2votes: how many votes have been cast at level i
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
for level in level_start..level_end {
let proposers = &locked_blockchain.level2allproposers[&level];
let mut max_vote_count = 0;
let mut leader: H256 = [0; 32].into();
//Assumption: if a vote for proposer not present, assumed to be 0
//When above TODO is done, then this will not be needed or modified accordingly
for proposer in proposers {
if locked_blockchain.proposer2votecount.contains_key(proposer) {
let vote_count = locked_blockchain.proposer2votecount[proposer];
if vote_count > max_vote_count {
max_vote_count = vote_count;
leader = *proposer;
}
}
}
//break out as there is no point going forward as no leader found at this level
if max_vote_count == 0 {
break;
}
println!("Adding leader at level {}, leader hash: {:?}, max votes: {}", level, leader, max_vote_count);
leader_sequence.push(leader);
self.ledger_manager_state.leader_sequence.push(leader);
println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
leader_sequence
}
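//Note the difference between the two strategies: get_leader_sequence (above) simply picks
//the proposer with the most votes seen so far at each level, while
//get_confirmed_leader_sequence (below) only emits a leader once its votes are confirmed
//(buried voter_depth_k blocks deep) via confirm_leader, following the confirmation policy
//referenced below.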
fn get_confirmed_leader_sequence(&mut self) -> Vec<H256> {
let mut leader_sequence: Vec<H256> = vec![];
//Locking Blockchain to get proposer_depth currently. Then dropping the lock
//Will be holding the lock for each level's processing inside the subroutine
let locked_blockchain = self.blockchain.lock().unwrap();
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
drop(locked_blockchain);
for level in level_start..level_end {
let leader: Option<H256> = self.confirm_leader(level);
match leader {
Some(leader_hash) => {
println!("Adding leader at level {}, leader hash: {:?}", level, leader_hash);
leader_sequence.push(leader_hash);
// self.ledger_manager_state.leader_sequence.push(leader_hash);
// println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
None => {
println!("Unable to confirm leader at level {}", level);
println!("Returning from get_confirmed_leader_sequence func");
break; // TODO: Will this break out of loop??
}
}
}
leader_sequence
}
//we use the confirmation policy from https://arxiv.org/abs/1810.08092
//This function is heavily borrowed from implementation provided in the actual Prism codebase
//https://github.com/yangl1996/prism-rust/
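//In short: a vote counts as confirmed once the voter block carrying it is buried at least
//voter_depth_k blocks deep in its voter chain, and a proposer block at this level becomes
//leader once more than half of all voter chains have such confirmed votes for it.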
fn confirm_leader(&mut self, level: u32) -> Option<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let proposer_blocks = &locked_blockchain.level2allproposers[&level];
let mut new_leader: Option<H256> = None;
let num_voter_chains: u32 = locked_blockchain.num_voter_chains;
// for each proposer count the number of confirmed votes i.e. votes that are k-deep (2-deep).
let mut num_confirmed_votes: HashMap<H256, u32> = HashMap::new();
for block in proposer_blocks {
if locked_blockchain.proposer2voterinfo.contains_key(block) {
//TODO: We might also need number of voter blocks at a particular level of a voter chain
//This is not urgent as we can **assume** there is one block at each level
let voters_info = &locked_blockchain.proposer2voterinfo[block];
if voters_info.len() < (num_voter_chains as usize / 2) {
println!("number of votes for {:?} is {}", block, voters_info.len());
continue;
}
let mut total_k_deep_votes: u32 = 0;
for (voter_chain, voter_block) in voters_info {
let voter_block_level = locked_blockchain.voter_chains[(*voter_chain-1) as usize][voter_block].level;
let voter_chain_level = locked_blockchain.voter_depths[(*voter_chain-1) as usize];
let this_vote_depth = voter_chain_level - voter_block_level;
if this_vote_depth >= self.voter_depth_k {
total_k_deep_votes += 1;
}
}
num_confirmed_votes.insert(*block, total_k_deep_votes);
}
}
for (proposer, votes) in num_confirmed_votes.iter() {
println!("proposer {:?} votes {}", proposer, *votes);
if *votes > (num_voter_chains / 2) {
new_leader = Some(*proposer);
break;
}
}
new_leader
}
// needs to process parent as well
fn get_transaction_sequence(&mut self, leader_sequence: &Vec<H256>) -> Vec<SignedTransaction> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut tx_sequence: Vec<SignedTransaction> = Vec::new();
//TODO: Should we do this recursively? I.e., should we also follow references to the
//proposer references of the leader?
//TODO: Also we should refactor it later
for leader in leader_sequence {
let leader_block = &locked_blockchain.proposer_chain[leader].block;
//processing parent and proposer refs
let mut proposer_refs_to_process: Vec<H256> = Vec::new();
let mut leader_txs: Vec<SignedTransaction> = Vec::new();
match &leader_block.content {
Content::Proposer(content) => {
// parent and proposer_refs of leader
let parent = &content.parent_hash;
let proposer_refs = &content.proposer_refs;
if !self.ledger_manager_state.proposer_blocks_processed.contains(parent) {
proposer_refs_to_process.push(*parent);
}
for proposer_ref in proposer_refs {
if !self.ledger_manager_state.proposer_blocks_processed.contains(proposer_ref) {
proposer_refs_to_process.push(*proposer_ref);
}
}
//txs of leader
leader_txs = content.transactions.clone();
}
_ => {
}
}
//TODO: Do we have to do match in this and previous loop as we know it will always
//match to Proposer(content). Can we unwrap??
for proposer_ref in &proposer_refs_to_process {
let proposer_block = &locked_blockchain | {
loop{
//Step 1
//let leader_sequence = self.get_leader_sequence();
//This one uses the algorithm described in Prism Paper
let leader_sequence = self.get_confirmed_leader_sequence();
//Step 2
let tx_sequence = self.get_transaction_sequence(&leader_sequence);
//Step 3
self.confirm_transactions(&tx_sequence);
thread::sleep(Duration::from_secs(1));
}
} | identifier_body |
ledger_manager.rs | State>>, k: u32) -> Self {
let ledger_manager_state = LedgerManagerState{
last_level_processed: 1,
proposer_blocks_processed: HashSet::new(),
leader_sequence: Vec::new(),
tx_confirmed: HashSet::new(),
tx_count: 0,
};
LedgerManager {
ledger_manager_state: ledger_manager_state,
blockchain: Arc::clone(blockchain),
utxo_state: Arc::clone(utxo_state),
voter_depth_k: k,
}
}
pub fn start(mut self) {
thread::Builder::new()
.name("ledger_manager".to_string())
.spawn(move || {
self.ledger_manager_loop();
})
.unwrap();
}
//Three Steps
//1. Get the leader sequence
//2. Get Transaction sequence
//3. Sanitize Tx and update UTXO state
//All 3 steps are done in the loop
//
fn ledger_manager_loop(&mut self) {
loop{
//Step 1
//let leader_sequence = self.get_leader_sequence();
//This one uses the algorithm described in Prism Paper
let leader_sequence = self.get_confirmed_leader_sequence();
//Step 2
let tx_sequence = self.get_transaction_sequence(&leader_sequence);
//Step 3
self.confirm_transactions(&tx_sequence);
thread::sleep(Duration::from_secs(1));
}
}
fn get_leader_sequence(&mut self) -> Vec<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut leader_sequence: Vec<H256> = vec![];
//TODO: This is a workaround for now until we have some DS which asserts that
//all voter chains at a particular level have voted
// level2votes: how many votes have been cast at level i
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
for level in level_start..level_end {
let proposers = &locked_blockchain.level2allproposers[&level];
let mut max_vote_count = 0;
let mut leader: H256 = [0; 32].into();
//Assumption: if a vote for proposer not present, assumed to be 0
//When above TODO is done, then this will not be needed or modified accordingly
for proposer in proposers {
if locked_blockchain.proposer2votecount.contains_key(proposer) {
let vote_count = locked_blockchain.proposer2votecount[proposer];
if vote_count > max_vote_count {
max_vote_count = vote_count;
leader = *proposer;
}
}
}
//break out, as there is no point going forward: no leader was found at this level
if max_vote_count == 0 {
break;
}
println!("Adding leader at level {}, leader hash: {:?}, max votes: {}", level, leader, max_vote_count);
leader_sequence.push(leader);
self.ledger_manager_state.leader_sequence.push(leader);
println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
leader_sequence
}
fn get_confirmed_leader_sequence(&mut self) -> Vec<H256> {
let mut leader_sequence: Vec<H256> = vec![];
//Locking Blockchain to get proposer_depth currently. Then dropping the lock
//Will be holding the lock for each level's processing inside the subroutine
let locked_blockchain = self.blockchain.lock().unwrap();
let level_start = self.ledger_manager_state.last_level_processed + 1;
let level_end = locked_blockchain.proposer_depth + 1;
drop(locked_blockchain);
for level in level_start..level_end {
let leader: Option<H256> = self.confirm_leader(level);
match leader {
Some(leader_hash) => {
println!("Adding leader at level {}, leader hash: {:?}", level, leader_hash);
leader_sequence.push(leader_hash);
// self.ledger_manager_state.leader_sequence.push(leader_hash);
// println!("Leader sequence: {:?}", self.ledger_manager_state.leader_sequence);
self.ledger_manager_state.last_level_processed = level;
}
None => {
println!("Unable to confirm leader at level {}", level);
println!("Returning from get_confirmed_leader_sequence func");
break; // TODO: Will this break out of loop??
}
}
}
leader_sequence
}
//we use the confirmation policy from https://arxiv.org/abs/1810.08092
//This function is heavily borrowed from implementation provided in the actual Prism codebase
//https://github.com/yangl1996/prism-rust/
fn confirm_leader(&mut self, level: u32) -> Option<H256> {
let locked_blockchain = self.blockchain.lock().unwrap();
let proposer_blocks = &locked_blockchain.level2allproposers[&level];
let mut new_leader: Option<H256> = None;
let num_voter_chains: u32 = locked_blockchain.num_voter_chains;
// for each proposer count the number of confirmed votes i.e. votes that are k-deep (2-deep).
let mut num_confirmed_votes: HashMap<H256, u32> = HashMap::new();
for block in proposer_blocks {
if locked_blockchain.proposer2voterinfo.contains_key(block) {
//TODO: We might also need number of voter blocks at a particular level of a voter chain
//This is not urgent as we can **assume** there is one block at each level
let voters_info = &locked_blockchain.proposer2voterinfo[block];
if voters_info.len() < (num_voter_chains as usize / 2) {
println!("number of votes for {:?} is {}", block, voters_info.len());
continue;
}
let mut total_k_deep_votes: u32 = 0;
for (voter_chain, voter_block) in voters_info {
let voter_block_level = locked_blockchain.voter_chains[(*voter_chain-1) as usize][voter_block].level;
let voter_chain_level = locked_blockchain.voter_depths[(*voter_chain-1) as usize];
let this_vote_depth = voter_chain_level - voter_block_level;
if this_vote_depth >= self.voter_depth_k {
total_k_deep_votes += 1;
}
}
num_confirmed_votes.insert(*block, total_k_deep_votes);
}
}
for (proposer, votes) in num_confirmed_votes.iter() {
println!("proposer {:?} votes {}", proposer, *votes);
if *votes > (num_voter_chains / 2) {
new_leader = Some(*proposer);
break;
}
}
new_leader
}
// needs to process parent as well
fn get_transaction_sequence(&mut self, leader_sequence: &Vec<H256>) -> Vec<SignedTransaction> {
let locked_blockchain = self.blockchain.lock().unwrap();
let mut tx_sequence: Vec<SignedTransaction> = Vec::new();
//TODO: Should we do this recursively? I.e., should we also follow references to the
//proposer references of the leader?
//TODO: Also we should refactor it later
for leader in leader_sequence {
let leader_block = &locked_blockchain.proposer_chain[leader].block;
//processing parent and proposer refs
let mut proposer_refs_to_process: Vec<H256> = Vec::new();
let mut leader_txs: Vec<SignedTransaction> = Vec::new();
match &leader_block.content {
Content::Proposer(content) => {
// parent and proposer_refs of leader
let parent = &content.parent_hash;
let proposer_refs = &content.proposer_refs;
if !self.ledger_manager_state.proposer_blocks_processed.contains(parent) {
proposer_refs_to_process.push(*parent);
}
for proposer_ref in proposer_refs {
if !self.ledger_manager_state.proposer_blocks_processed.contains(proposer_ref) {
proposer_refs_to_process.push(*proposer_ref);
}
}
//txs of leader
leader_txs = content.transactions.clone();
}
_ => {
}
}
//TODO: Do we have to do match in this and previous loop as we know it will always
//match to Proposer(content). Can we unwrap??
for proposer_ref in &proposer_refs_to_process {
let proposer_block = &locked_blockchain.proposer_chain[proposer_ref].block;
match &proposer_block.content {
Content::Proposer(content) => {
tx_sequence.append(&mut content.transactions.clone());
}
_ => {
}
}
self.ledger_manager_state.proposer_blocks_processed.insert(*proposer_ref);
}
//appending leader txs finally
//adding leader to proposer_blocks_processed
tx_sequence.append(&mut leader_txs); | self.ledger_manager_state.proposer_blocks_processed.insert(*leader);
}
tx_sequence | random_line_split |
|
PredictionRuntimeInspector.py | brk; brk(port=9011)
exp.pause = True
self.pause = True
self.pauseAtPhaseSetup = False
else:
self.pauseAtPhaseSetup = True
def onPhaseTeardown(self, exp):
title()
index = exp.position.phase
# Last phase
if index == len(exp.workflow) - 1:
self.done = True
def onIter(self, exp, i):
""" """
title(additional='(), self.pause = ' + str(self.pause))
self.iteration += 1
# check if the pause button was clicked
if self.pause:
exp.pause = True
elif self.runCount is not None:
self.runCount -= 1
if self.runCount == 0:
exp.pause = True
runtimelistener.listenersEnabled = exp.pause
def _setProgress(self):
"""Set the progress trait from the iteration."""
self.progress = (self.iteration, self.iterationCount)
def _disableRegions(self):
"""
Disable any regions that can't be run.
Currently only looks for VectorFileEffectors whose outputFile parameter
is equal to 'No outputFile specified'.
"""
effectors = [e for e in _getElements(self.network) if _isEffector(e)]
for e in effectors:
if e.getParameter('outputFile') == 'No outputFile specified':
_disable(self.network, e.getName())
def _createView(self):
"""Set up a view for the traits."""
items = []
if self.showProgressBar:
items.append(Item('progress', show_label=False,
editor=ProgressEditor(callback=self._seek)))
# Controls
items.append(
alignCenter(
Item('backwardButton', style='custom',
enabled_when='not object.running and object.mainloopRunning '
+'and object.sensors and object.iteration > 1'),
Item('runButton', style='custom',
enabled_when='object.pause and not object.done'),
Item('pauseButton', style='custom',
enabled_when='not (object.pause or object.done)'),
Item('stepButton', style='custom',
enabled_when='object.pause and not object.done'),
show_labels=False,
orientation='horizontal'
))
# Repeat button and pause target buttons
items.append(
alignCenter(
Item('repeatButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning '
'and object.iteration > 0'),
Item('nextTargetButton', show_label=False,
editor=ButtonEditor(label_value='targetButtonLabel'),
enabled_when='not object.running and object.mainloopRunning '
'and object.pauseTarget'),
Item('customTargetButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning')
))
# Speed control
items.append(Item('speed', style='custom', show_label=False,
editor=EnumEditor(cols=1, values={
1 : '1: Slow (update on every iteration)',
10 : '2: Medium (update every 10 iterations)',
100 : '3: Fast (update every 100 iterations)'
})
))
items.extend([
Group(
Item('pauseAtNextStep'),
show_left=False
),
alignLeft(
Item('stopButton', show_label=False, enabled_when='object.iteration')
)
])
self.traits_view = View(*items)
def close(self):
"""Called by MultiInspector upon closing."""
#title()
self.experiment.pause = True
if self.running:
self.running = False
self._unregisterCallbacks()
def start(self, numIterations=0, target=None, tier=0, stop=False,
callback=None):
"""
Start running the network, and start the mainloop if necessary.
numIterations -- Number of iterations to run for (optional).
target -- Run until this condition is met (used by the Vision Framework).
This is distinct from the user-specified target in the GUI, which simply
tells the network when to pause.
tier -- Tier being trained. Used to check the target on it.
stop -- Whether to stop immediately (used when training a tier for 0
iterations).
callback -- Called right after initialization (optional).
"""
#title()
self._registerCallbacks()
self.iterationCount = numIterations
self.iteration = 0
self.stopTarget = target
self.tier = tier
self.pauseAtPhaseSetup = False
if callback:
callback()
self._disableRegions() # Disable regions which can't be run (e.g. effectors)
# Re-enable this window (use this hack to get a reference to it), because
# it may have been disabled at the end of the previous tier/dataset when
# running from the Vision Framework
wx.GetApp().GetTopWindow().Enable()
# Insert a call into the event loop to run (or stop)
#if stop: # Immediately stop - used by training GUI with 0 iterations
# wx.CallLater(self.runInterval, self.stop)
#elif self.running:
# wx.CallLater(self.runInterval, self._run)
self.mainloopRunning = True
if not wx.GetApp().IsMainLoopRunning():
wx.GetApp().MainLoop()
self.close()
def stop(self):
"""Stop running."""
# Stop the experiment
self.experiment.pause = True
# Any extra calls to _run that are in the event loop should return
self.stopping = True
#if self.pauseAtNextTier and self.pauseAtNextDataset:
if self.pauseAtNextStep:
# Pausing, so set running to False -- otherwise we will continue running
# again
self.running = False
# Disable the window to prevent user input until start is called again
# E.g. control will go back to the Vision Framework
wx.GetApp().GetTopWindow().Disable()
self.mainloopRunning = False
wx.GetApp().ExitMainLoop()
def _seek(self, iteration):
"""Seek to the specified iteration."""
# Validate it
if iteration < 1:
iteration = 1
# Seek to one iteration before the specified iteration, then run the
# network for one iteration, so the inspectors will show the right data
self.iteration = iteration - 1
self.experiment.position.iter = iteration - 1
for sensor in self.sensors:
assert sensor.type == 'VectorFileSensor'
sensor.setParameter('position', self.iteration)
self._step()
def _pause(self):
#title(additional='(), self.pause = ' + str(self.pause))
self.pause = True
def _runExperiment(self):
#title(additional='(), self.pause = ' + str(self.pause))
self.experiment.run()
return self.experiment.done
def _run(self):
"""Run the experiment."""
#title(additional='(), self.pause = ' + str(self.pause))
#if self.experiment.done or self.pause:
# return
# self.speed can be either 1, 10, or 100
if not self.iterationCount:
iterations = self.speed
else:
iterations = \
min(self.speed - self.iteration % self.speed, self.iterationCount - self.iteration)
self.runCount = iterations
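# Run the experiment in bursts of `speed` iterations (1, 10, or 100): the burst size is
# aligned to the next multiple of `speed` and capped at the iterations remaining. onIter
# pauses the experiment once runCount reaches zero, so the GUI can refresh before _run
# reschedules itself below (unless done or paused).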
self.experiment.pause = False
self.done = self._runExperiment()
# If the experiment is done or paused or stepping
if self.done or self.pause:
return
# Schedule another run
wx.CallLater(self.runInterval, self._run)
def update(self, methodName=None, elementName=None, args=None, kwargs=None):
"""
Called automatically in response to runtime engine activity.
Extra arguments (optional) are passed by the wrapped methods,
and they can be used to avoid unnecessary updating.
@param methodName -- RuntimeElement class method that was called.
@param elementName -- RuntimeElement name.
@param args -- Positional arguments passed to the method.
@param kwargs -- Keyword arguments passed to the method.
"""
#if methodName != 'run':
# return
##print methodName
##from dbgp.client import brk; brk(port=9011)
#self.iteration = self.experiment.position.iter
#exp = self.experiment
## check if the pause button was clicked
#if self.pause:
# exp.pause = True
#elif self.runCount is not None:
# self.runCount -= 1
# if self.runCount == 0:
# exp.pause = True
#
#runtimelistener.listenersEnabled = exp.pause
def _step(self):
"""Run the network for one iteration."""
title()
self.runCount = 1
self.experiment.pause = False
self._runExperiment()
self.pause = True
def _iteration_changed(self):
"""
Called automatically by Traits when the iteration updates.
Update the progress bar and check the conditions to see whether to stop or
pause.
"""
if self.showProgressBar:
| try:
self._setProgress()
except:
# may fail when switching from training to inference
from dbgp.client import brk; brk(port=9011)
pass | conditional_block |
|
PredictionRuntimeInspector.py | ("Repeat")
# The "Next Error" button becomes "Next Target" when user sets a custom one
nextTargetButton = Event # Button with dynamic label
targetButtonLabels = ('Next error', 'Next target') | pauseTarget = Str
backwardButton = Button('Back',
image=ImageResource(getNTAImage('backward_36_26')))
runButton = Button('Run',
image=ImageResource(getNTAImage('play_36_26')))
pauseButton = Button('Pause',
image=ImageResource(getNTAImage('pause_36_26')))
stepButton = Button('Step',
image=ImageResource(getNTAImage('forward_36_26')))
# Internal traits
tier = Int
tierCount = Int
running = Bool
iteration = Int
numIterations = Int
mainloopRunning = Bool
spacer = Str
pause = Bool
done = Bool
@staticmethod
def getNames():
"""
Return the short and long names for this inspector. The short name appears
in the dropdown menu, and the long name is used as the window title.
"""
return ('run', 'Run Network')
def __init__(self, parent, experiment, tierCount=0, showProgressBar=False,
multipleDatasets=False, startImmediately=True):
"""
"""
#title()
self.experiment = experiment
self.pause = True
self.experiment.pause = True
self.runCount = None
network = experiment.network
NetworkInspector.__init__(self, parent, network)
self.tierCount = tierCount
self.showProgressBar = showProgressBar
self.multipleDatasets = multipleDatasets
self.stopTarget = None
self.iteration = 0
self.iterationCount = 0
self.usingPauseTarget = False
self.stepping = False
# Look for all sensors (regions without inputs)
# If all sensors are of a supported type, seeking backward is supported
# Otherwise, seeking backward is not supported, because not all sensors
# can be run in reverse
self.sensors = []
for element in _getElements(self.network):
if _isSensor(element):
if element.type == 'VectorFileSensor':
self.sensors.append(element)
else:
# Unsupported sensor type
self.sensors = []
break
# Set the stop button label differently for training and testing
# The stop button isn't shown at all if neither training nor testing, so
# don't worry about that case
if not self.tierCount and self.multipleDatasets:
self.add_trait('stopButton', Button(label='Skip to next dataset'))
else:
self.add_trait('stopButton', Button(label='Stop training this tier'))
# Set up the default pause target (next error), which the user will be
# able to change later
#self._createDefaultPauseTarget()
#self.pauseTarget = self.defaultPauseTarget
# Set up the Traits view
self._createView()
if startImmediately:
self.start() # Start running the network
#RuntimeInspectorBase.__init__(self, parent, network, tierCount, showProgressBar,
# multipleDatasets, startImmediately)
def _registerCallbacks(self):
d = self.experiment.description
for name in 'spTrain', 'tpTrain', 'classifierTrain', 'infer':
if not name in d:
continue
phase = d[name]
if len(phase) > 0:
assert self.onPhaseSetup not in phase[0]['setup']
phase[0]['setup'].append(self.onPhaseSetup)
phase[-1]['finish'].append(self.onPhaseTeardown)
for step in phase:
# Make sure to be the first callback
step['iter'].insert(0, self.onIter)
def _unregisterCallbacks(self):
d = self.experiment.description
for name in 'spTrain', 'tpTrain', 'classifierTrain', 'infer':
if not name in d:
continue
phase = d[name]
if len(phase) > 0:
assert self.onPhaseSetup in phase[0]['setup']
phase[0]['setup'].remove(self.onPhaseSetup)
phase[-1]['finish'].remove(self.onPhaseTeardown)
for step in phase:
# Make sure to be the first callback
step['iter'].remove(self.onIter)
def _getTierName(self, tier):
raise NotImplementedError
def detectNetworkType(self):
self.isFrameworkNetwork = 'sensor' in self.network.regions and \
self.network.regions['sensor'].type == 'RecordSensor'
def _getPhase(self, e):
index = e.position.phase
name = e.workflow[index][0]
return (name, e.description[name])
def onPhaseSetup(self, exp):
title()
self.iteration = 0
self.phase = self._getPhase(exp)
phase = self.phase[1]
self.iterationCount = phase[0]['iterationCount'] if len(phase) > 0 else 0
if self.pauseAtNextStep and self.pauseAtPhaseSetup:
#if self.pauseAtNextStep:
#from dbgp.client import brk; brk(port=9011)
exp.pause = True
self.pause = True
self.pauseAtPhaseSetup = False
else:
self.pauseAtPhaseSetup = True
def onPhaseTeardown(self, exp):
title()
index = exp.position.phase
# Last phase
if index == len(exp.workflow) - 1:
self.done = True
def onIter(self, exp, i):
""" """
title(additional='(), self.pause = ' + str(self.pause))
self.iteration += 1
# check if the pause button was clicked
if self.pause:
exp.pause = True
elif self.runCount is not None:
self.runCount -= 1
if self.runCount == 0:
exp.pause = True
runtimelistener.listenersEnabled = exp.pause
def _setProgress(self):
"""Set the progress trait from the iteration."""
self.progress = (self.iteration, self.iterationCount)
def _disableRegions(self):
"""
Disable any regions that can't be run.
Currently only looks for VectorFileEffectors whose outputFile parameter
is equal to 'No outputFile specified'.
"""
effectors = [e for e in _getElements(self.network) if _isEffector(e)]
for e in effectors:
if e.getParameter('outputFile') == 'No outputFile specified':
_disable(self.network, e.getName())
def _createView(self):
"""Set up a view for the traits."""
items = []
if self.showProgressBar:
items.append(Item('progress', show_label=False,
editor=ProgressEditor(callback=self._seek)))
# Controls
items.append(
alignCenter(
Item('backwardButton', style='custom',
enabled_when='not object.running and object.mainloopRunning '
+'and object.sensors and object.iteration > 1'),
Item('runButton', style='custom',
enabled_when='object.pause and not object.done'),
Item('pauseButton', style='custom',
enabled_when='not (object.pause or object.done)'),
Item('stepButton', style='custom',
enabled_when='object.pause and not object.done'),
show_labels=False,
orientation='horizontal'
))
# Repeat button and pause target buttons
items.append(
alignCenter(
Item('repeatButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning '
'and object.iteration > 0'),
Item('nextTargetButton', show_label=False,
editor=ButtonEditor(label_value='targetButtonLabel'),
enabled_when='not object.running and object.mainloopRunning '
'and object.pauseTarget'),
Item('customTargetButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning')
))
# Speed control
items.append(Item('speed', style='custom', show_label=False,
editor=EnumEditor(cols=1, values={
1 : '1: Slow (update on every iteration)',
10 : '2: Medium (update every 10 iterations)',
100 : '3: Fast (update every 100 iterations)'
})
))
items.extend([
Group(
Item('pauseAtNextStep'),
show_left=False
),
alignLeft(
Item('stopButton', show_label=False, enabled_when='object.iteration')
)
])
self.traits_view = View(*items)
def close(self):
"""Called by MultiInspector upon closing."""
#title()
self.experiment.pause = True
if self.running:
self.running = False
self._unregisterCallbacks()
def start(self, numIterations=0, target=None, tier=0, stop=False,
callback=None):
"""
Start running the network, and start the mainloop if necessary.
numIterations -- Number of iterations to run for (optional).
target -- Run until this condition is met (used by the Vision Framework).
This is distinct from the user-s | targetButtonLabel = Str(targetButtonLabels[0])
customTargetButton = Button('Custom...') | random_line_split |
PredictionRuntimeInspector.py | i):
""" """
title(additional='(), self.pause = ' + str(self.pause))
self.iteration += 1
# check if the pause button was clicked
if self.pause:
exp.pause = True
elif self.runCount is not None:
self.runCount -= 1
if self.runCount == 0:
exp.pause = True
runtimelistener.listenersEnabled = exp.pause
def _setProgress(self):
"""Set the progress trait from the iteration."""
self.progress = (self.iteration, self.iterationCount)
def _disableRegions(self):
"""
Disable any regions that can't be run.
Currently only looks for VectorFileEffectors whose outputFile parameter
is equal to 'No output file specified'.
"""
effectors = [e for e in _getElements(self.network) if _isEffector(e)]
for e in effectors:
if e.getParameter('outputFile') == 'No outputFile specified':
_disable(self.network, e.getName())
def _createView(self):
"""Set up a view for the traits."""
items = []
if self.showProgressBar:
items.append(Item('progress', show_label=False,
editor=ProgressEditor(callback=self._seek)))
# Controls
items.append(
alignCenter(
Item('backwardButton', style='custom',
enabled_when='not object.running and object.mainloopRunning '
+'and object.sensors and object.iteration > 1'),
Item('runButton', style='custom',
enabled_when='object.pause and not object.done'),
Item('pauseButton', style='custom',
enabled_when='not (object.pause or object.done)'),
Item('stepButton', style='custom',
enabled_when='object.pause and not object.done'),
show_labels=False,
orientation='horizontal'
))
# Repeat button and pause target buttons
items.append(
alignCenter(
Item('repeatButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning '
'and object.iteration > 0'),
Item('nextTargetButton', show_label=False,
editor=ButtonEditor(label_value='targetButtonLabel'),
enabled_when='not object.running and object.mainloopRunning '
'and object.pauseTarget'),
Item('customTargetButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning')
))
# Speed control
items.append(Item('speed', style='custom', show_label=False,
editor=EnumEditor(cols=1, values={
1 : '1: Slow (update on every iteration)',
10 : '2: Medium (update every 10 iterations)',
100 : '3: Fast (update every 100 iterations)'
})
))
items.extend([
Group(
Item('pauseAtNextStep'),
show_left=False
),
alignLeft(
Item('stopButton', show_label=False, enabled_when='object.iteration')
)
])
self.traits_view = View(*items)
def close(self):
"""Called by MultiInspector upon closing."""
#title()
self.experiment.pause = True
if self.running:
self.running = False
self._unregisterCallbacks()
def start(self, numIterations=0, target=None, tier=0, stop=False,
callback=None):
"""
Start running the network, and start the mainloop if necessary.
numIterations -- Number of iterations to run for (optional).
target -- Run until this condition is met (used by the Vision Framework).
This is distinct from the user-specified target in the GUI, which simply
tells the network when to pause.
tier -- Tier being trained. Used to check the target on it.
stop -- Whether to stop immediately (used when training a tier for 0
iterations).
callback -- Called right after initialization (optional).
"""
#title()
self._registerCallbacks()
self.iterationCount = numIterations
self.iteration = 0
self.stopTarget = target
self.tier = tier
self.pauseAtPhaseSetup = False
if callback:
callback()
self._disableRegions() # Disable regions which can't be run (e.g. effectors)
# Re-enable this window (use this hack to get a reference to it), because
# it may have been disabled at the end of the previous tier/dataset when
# running from the Vision Framework
wx.GetApp().GetTopWindow().Enable()
# Insert a call into the event loop to run (or stop)
#if stop: # Immediately stop - used by training GUI with 0 iterations
# wx.CallLater(self.runInterval, self.stop)
#elif self.running:
# wx.CallLater(self.runInterval, self._run)
self.mainloopRunning = True
if not wx.GetApp().IsMainLoopRunning():
wx.GetApp().MainLoop()
self.close()
def stop(self):
"""Stop running."""
# Stop the experiment
self.experiment.pause = True
# Any extra calls to _run that are in the event loop should return
self.stopping = True
#if self.pauseAtNextTier and self.pauseAtNextDataset:
if self.pauseAtNextStep:
# Pausing, so set running to False -- otherwise we will continue running
# again
self.running = False
# Disable the window to prevent user input until start is called again
# E.g. control will go back to the Vision Framework
wx.GetApp().GetTopWindow().Disable()
self.mainloopRunning = False
wx.GetApp().ExitMainLoop()
def _seek(self, iteration):
"""Seek to the specified iteration."""
# Validate it
if iteration < 1:
iteration = 1
# Seek to one iteration before the specified iteration, then run the
# network for one iteration, so the inspectors will show the right data
self.iteration = iteration - 1
self.experiment.position.iter = iteration - 1
for sensor in self.sensors:
assert sensor.type == 'VectorFileSensor'
sensor.setParameter('position', self.iteration)
self._step()
def _pause(self):
#title(additional='(), self.pause = ' + str(self.pause))
self.pause = True
def _runExperiment(self):
#title(additional='(), self.pause = ' + str(self.pause))
self.experiment.run()
return self.experiment.done
def _run(self):
"""Run the experiment."""
#title(additional='(), self.pause = ' + str(self.pause))
#if self.experiment.done or self.pause:
# return
# self.speed can be either 1, 10, or 100
if not self.iterationCount:
iterations = self.speed
else:
iterations = \
min(self.speed - self.iteration % self.speed, self.iterationCount - self.iteration)
self.runCount = iterations
self.experiment.pause = False
self.done = self._runExperiment()
# If the experiment is done or paused or stepping
if self.done or self.pause:
return
# Schedule another run
wx.CallLater(self.runInterval, self._run)
def update(self, methodName=None, elementName=None, args=None, kwargs=None):
"""
Called automatically in response to runtime engine activity.
Extra arguments (optional) are passed by the wrapped methods,
and they can be used to avoid unnecessary updating.
@param methodName -- RuntimeElement class method that was called.
@param elementName -- RuntimeElement name.
@param args -- Positional arguments passed to the method.
@param kwargs -- Keyword arguments passed to the method.
"""
#if methodName != 'run':
# return
##print methodName
##from dbgp.client import brk; brk(port=9011)
#self.iteration = self.experiment.position.iter
#exp = self.experiment
## check if the pause button was clicked
#if self.pause:
# exp.pause = True
#elif self.runCount is not None:
# self.runCount -= 1
# if self.runCount == 0:
# exp.pause = True
#
#runtimelistener.listenersEnabled = exp.pause
def _step(self):
"""Run the network for one iteration."""
title()
self.runCount = 1
self.experiment.pause = False
self._runExperiment()
self.pause = True
def _iteration_changed(self):
"""
Called automatically by Traits when the iteration updates.
Update the progress bar and check the conditions to see whether to stop or
pause.
"""
if self.showProgressBar:
try:
self._setProgress()
except:
# may fail when switching from training to inference
from dbgp.client import brk; brk(port=9011)
pass
def _runButton_fired(self):
self.pause = False
wx.CallLater(self.runInterval, self._run)
def _pauseButton_fired(self):
#from dbgp.client import brk; brk(port=9011)
self.pause = True
#wx.CallLater(self.runInterval, self._pause)
def _stepButton_fired(self):
| wx.CallLater(self.runInterval, self._step) | identifier_body |
|
PredictionRuntimeInspector.py | ("Repeat")
# The "Next Error" button becomes "Next Target" when user sets a custom one
nextTargetButton = Event # Button with dynamic label
targetButtonLabels = ('Next error', 'Next target')
targetButtonLabel = Str(targetButtonLabels[0])
customTargetButton = Button('Custom...')
pauseTarget = Str
backwardButton = Button('Back',
image=ImageResource(getNTAImage('backward_36_26')))
runButton = Button('Run',
image=ImageResource(getNTAImage('play_36_26')))
pauseButton = Button('Pause',
image=ImageResource(getNTAImage('pause_36_26')))
stepButton = Button('Step',
image=ImageResource(getNTAImage('forward_36_26')))
# Internal traits
tier = Int
tierCount = Int
running = Bool
iteration = Int
numIterations = Int
mainloopRunning = Bool
spacer = Str
pause = Bool
done = Bool
@staticmethod
def getNames():
"""
Return the short and long names for this inspector. The short name appears
in the dropdown menu, and the long name is used as the window title.
"""
return ('run', 'Run Network')
def __init__(self, parent, experiment, tierCount=0, showProgressBar=False,
multipleDatasets=False, startImmediately=True):
"""
"""
#title()
self.experiment = experiment
self.pause = True
self.experiment.pause = True
self.runCount = None
network = experiment.network
NetworkInspector.__init__(self, parent, network)
self.tierCount = tierCount
self.showProgressBar = showProgressBar
self.multipleDatasets = multipleDatasets
self.stopTarget = None
self.iteration = 0
self.iterationCount = 0
self.usingPauseTarget = False
self.stepping = False
# Look for all sensors (regions without inputs)
# If all sensors are of a supported type, seeking backward is supported
# Otherwise, seeking backward is not supported, because not all sensors
# can be run in reverse
self.sensors = []
for element in _getElements(self.network):
if _isSensor(element):
if element.type == 'VectorFileSensor':
self.sensors.append(element)
else:
# Unsupported sensor type
self.sensors = []
break
# Set the stop button label differently for training and testing
# The stop button isn't shown at all if neither training nor testing, so
# don't worry about that case
if not self.tierCount and self.multipleDatasets:
self.add_trait('stopButton', Button(label='Skip to next dataset'))
else:
self.add_trait('stopButton', Button(label='Stop training this tier'))
# Set up the default pause target (next error), which the user will be
# able to change later
#self._createDefaultPauseTarget()
#self.pauseTarget = self.defaultPauseTarget
# Set up the Traits view
self._createView()
if startImmediately:
self.start() # Start running the network
#RuntimeInspectorBase.__init__(self, parent, network, tierCount, showProgressBar,
# multipleDatasets, startImmediately)
def | (self):
d = self.experiment.description
for name in 'spTrain', 'tpTrain', 'classifierTrain', 'infer':
if not name in d:
continue
phase = d[name]
if len(phase) > 0:
assert self.onPhaseSetup not in phase[0]['setup']
phase[0]['setup'].append(self.onPhaseSetup)
phase[-1]['finish'].append(self.onPhaseTeardown)
for step in phase:
# Make sure to be the first callback
step['iter'].insert(0, self.onIter)
def _unregisterCallbacks(self):
d = self.experiment.description
for name in 'spTrain', 'tpTrain', 'classifierTrain', 'infer':
if not name in d:
continue
phase = d[name]
if len(phase) > 0:
assert self.onPhaseSetup in phase[0]['setup']
phase[0]['setup'].remove(self.onPhaseSetup)
phase[-1]['finish'].remove(self.onPhaseTeardown)
for step in phase:
# Make sure to be the first callback
step['iter'].remove(self.onIter)
def _getTierName(self, tier):
raise NotImplementedError
def detectNetworkType(self):
self.isFrameworkNetwork = 'sensor' in self.network.regions and \
self.network.regions['sensor'].type == 'RecordSensor'
def _getPhase(self, e):
index = e.position.phase
name = e.workflow[index][0]
return (name, e.description[name])
def onPhaseSetup(self, exp):
title()
self.iteration = 0
self.phase = self._getPhase(exp)
phase = self.phase[1]
self.iterationCount = phase[0]['iterationCount'] if len(phase) > 0 else 0
if self.pauseAtNextStep and self.pauseAtPhaseSetup:
#if self.pauseAtNextStep:
#from dbgp.client import brk; brk(port=9011)
exp.pause = True
self.pause = True
self.pauseAtPhaseSetup = False
else:
self.pauseAtPhaseSetup = True
def onPhaseTeardown(self, exp):
title()
index = exp.position.phase
# Last phase
if index == len(exp.workflow) - 1:
self.done = True
def onIter(self, exp, i):
""" """
title(additional='(), self.pause = ' + str(self.pause))
self.iteration += 1
# check if the pause button was clicked
if self.pause:
exp.pause = True
elif self.runCount is not None:
self.runCount -= 1
if self.runCount == 0:
exp.pause = True
runtimelistener.listenersEnabled = exp.pause
def _setProgress(self):
"""Set the progress trait from the iteration."""
self.progress = (self.iteration, self.iterationCount)
def _disableRegions(self):
"""
Disable any regions that can't be run.
Currently only looks for VectorFileEffectors whose outputFile parameter
is equal to 'No outputFile specified'.
"""
effectors = [e for e in _getElements(self.network) if _isEffector(e)]
for e in effectors:
if e.getParameter('outputFile') == 'No outputFile specified':
_disable(self.network, e.getName())
def _createView(self):
"""Set up a view for the traits."""
items = []
if self.showProgressBar:
items.append(Item('progress', show_label=False,
editor=ProgressEditor(callback=self._seek)))
# Controls
items.append(
alignCenter(
Item('backwardButton', style='custom',
enabled_when='not object.running and object.mainloopRunning '
+'and object.sensors and object.iteration > 1'),
Item('runButton', style='custom',
enabled_when='object.pause and not object.done'),
Item('pauseButton', style='custom',
enabled_when='not (object.pause or object.done)'),
Item('stepButton', style='custom',
enabled_when='object.pause and not object.done'),
show_labels=False,
orientation='horizontal'
))
# Repeat button and pause target buttons
items.append(
alignCenter(
Item('repeatButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning '
'and object.iteration > 0'),
Item('nextTargetButton', show_label=False,
editor=ButtonEditor(label_value='targetButtonLabel'),
enabled_when='not object.running and object.mainloopRunning '
'and object.pauseTarget'),
Item('customTargetButton', show_label=False,
enabled_when='not object.running and object.mainloopRunning')
))
# Speed control
items.append(Item('speed', style='custom', show_label=False,
editor=EnumEditor(cols=1, values={
1 : '1: Slow (update on every iteration)',
10 : '2: Medium (update every 10 iterations)',
100 : '3: Fast (update every 100 iterations)'
})
))
items.extend([
Group(
Item('pauseAtNextStep'),
show_left=False
),
alignLeft(
Item('stopButton', show_label=False, enabled_when='object.iteration')
)
])
self.traits_view = View(*items)
def close(self):
"""Called by MultiInspector upon closing."""
#title()
self.experiment.pause = True
if self.running:
self.running = False
self._unregisterCallbacks()
def start(self, numIterations=0, target=None, tier=0, stop=False,
callback=None):
"""
Start running the network, and start the mainloop if necessary.
numIterations -- Number of iterations to run for (optional).
target -- Run until this condition is met (used by the Vision Framework).
This is distinct from the user-specified | _registerCallbacks | identifier_name |
04-aruco_calibration.py | uco_type = list(df[(df["place"]=="floor") & (df["aruco_type"]==aruco_type)]["id"])
if len(corners) > 0: # verify *at least* one ArUco marker was detected
for i, markerID in enumerate(list(ids.flatten())): # loop over the detected ArUCo corners
if markerID in ids_on_floor_with_current_aruco_type:
corners_all.append(corners[i])
ids_all.append(markerID)
markerSize = float(df[(df["place"]=="floor") & (df["aruco_type"]==aruco_type) & (df["id"]==markerID)]["size_mm"])
sizes_all.append(markerSize)
# print(corners_all)
# print(ids_all)
# print(sizes_all)
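# Only markers whose IDs the tag info table (df) lists as lying on the floor for this
# dictionary type are kept; their physical edge length (size_mm) is looked up so that the
# pose estimation below works in mm.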
print("[INFO] Num of detected Tags: ",len(corners_all))
corners_all = np.array(corners_all)
ids_all = np.array(ids_all)
sizes_all = np.array(sizes_all)
# verify *at least* one ArUco marker was detected on floor
if len(corners_all) > 0:
rvecs = []
tvecs = []
# loop over the detected ArUCo corners and draw ids and bounding boxes around the detected markers
for (markerCorner, markerID, markerSize) in zip(corners_all, ids_all, sizes_all):
# extract the marker corners (which are always returned in
# top-left, top-right, bottom-right, and bottom-left order)
corners = markerCorner.reshape((4, 2))
(topLeft, topRight, bottomRight, bottomLeft) = corners
# convert each of the (x, y)-coordinate pairs to integers
topRight = (int(topRight[0]), int(topRight[1]))
bottomRight = (int(bottomRight[0]), int(bottomRight[1]))
bottomLeft = (int(bottomLeft[0]), int(bottomLeft[1]))
topLeft = (int(topLeft[0]), int(topLeft[1]))
# draw the bounding box of the ArUCo detection
cv2.line(frame, topLeft, topRight, (0, 255, 0), 1)
cv2.line(frame, topRight, bottomRight, (0, 255, 0), 1)
cv2.line(frame, bottomRight, bottomLeft, (0, 255, 0), 1)
cv2.line(frame, bottomLeft, topLeft, (0, 255, 0), 1)
# compute and draw the center (x, y)-coordinates of the ArUco
# marker
cX = int((topLeft[0] + bottomRight[0]) / 2.0)
cY = int((topLeft[1] + bottomRight[1]) / 2.0)
cv2.circle(frame, (cX, cY), 4, (0, 0, 255), -1)
# draw the ArUco marker ID on the image
cv2.putText(frame, str(markerID),
(topLeft[0], topLeft[1] - 15), cv2.FONT_HERSHEY_SIMPLEX,
0.5, (0, 255, 0), 2)
# Estimate the pose of the detected marker in camera frame
rvec, tvec, markerPoints = cv2.aruco.estimatePoseSingleMarkers(markerCorner, markerSize, mtx_new, dist_new)
cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, markerSize*0.75) # Draw Axis
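# estimatePoseSingleMarkers returns the marker's pose in the camera frame; since the
# marker length is given in mm, the translation tvec is in mm as well.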
# print("[INFO] ArUco marker ID: {}".format(markerID))
# print(tvec[0].flatten()) # in camera's frame)
# print(rvec[0].flatten()) # in camera's frame)
rvecs.append(rvec[0])
tvecs.append(tvec[0])
rvecs = np.array(rvecs)
tvecs = np.array(tvecs)
# # Estimate the pose of the detected marker in camera frame at once
# rvecs, tvecs, markerPoints = cv2.aruco.estimatePoseSingleMarkers(corners_all, 190, mtx_new, dist_new)
# cv2.aruco.drawDetectedMarkers(frame, corners_all, ids_all) # Draw Bounding boxes
# for (rvec, tvec) in zip(rvecs, tvecs):
# cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, 190*0.75) # Draw Axis
# show the output image
cv2.imshow("Image", frame)
cv2.waitKey(0)
# print(tvecs)
# print(type(tvec))
# print(type(tvecs))
# Transform detected locations from camera frame to World frame
tvecs = np.squeeze(tvecs).T # (3,N)
tvecs = R_oc @ tvecs # (3,N)
tvecs = T_oc + tvecs # (3,N)
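# tvecs now holds the marker centers expressed in the world frame
# (p_world = R_oc @ p_cam + T_oc), using the extrinsics loaded from the calibration file.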
# print(np.shape(tvecs))
# Calculate the best fitting plane
origin = np.mean(tvecs,axis=1).reshape(3,1)
tvecs2 = np.squeeze(tvecs - origin) # (3, N)
(U,S,Vt) = np.linalg.svd(tvecs2)
normal_vec = U[:,-1].reshape(3,1)
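# SVD of the centered points: the columns of U are the principal directions of the data,
# ordered by decreasing singular value, so the last column (the direction of least
# variance) is the normal of the best-fit plane in the least-squares sense.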
# Calculate residuals of the plane fitting
distances = normal_vec.T @ tvecs2
RMSE = math.sqrt(np.mean(np.square(distances)))
# print("RMSE: ", RMSE, " (mm)")
# Plot the data and fitting plane
# plot data
plt.figure()
ax = plt.subplot(111, projection='3d')
ax.scatter(tvecs[0,:], tvecs[1,:], tvecs[2,:], color='b')
# plot plane
xlim = ax.get_xlim()
ylim = ax.get_ylim()
X,Y = np.meshgrid(np.arange(xlim[0], xlim[1], step=500),
np.arange(ylim[0], ylim[1], step=500))
Z = -(normal_vec[0]/normal_vec[2])*(X-origin[0]) - (normal_vec[1]/normal_vec[2])*(Y-origin[1]) + origin[2]
ax.plot_wireframe(X,Y,Z, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
plt.show()
## Calculate the frame on the best fit plane based on world origin
# Project the world origin onto the best fit plane
origin_new = (normal_vec.T @ origin) * normal_vec
# Project the world frame x axis onto the best-fit plane
x = np.array([[1],[0],[0]])
x_p = x - (normal_vec.T @ (x - origin) ) * normal_vec
# calculate the corresponding x axis on the plane
x_new = (x_p - origin_new) / np.sqrt(np.sum((x_p - origin_new)**2))
# calculate the corresponding y axis on the plane
y_new = np.cross(np.squeeze(normal_vec), np.squeeze(x_new)).reshape(3,1)
# y_new = y_new / np.sqrt(np.sum(y_new**2)) # Normalization is not necessary since x_new and normal_vec are perpendicular
# Define the rotation matrix from world frame to new plane frame
R_op = np.concatenate((x_new, y_new,normal_vec), axis=1)
R_po = R_op.T
# Define the translation between world frame to plane frame
T_op = origin_new
T_po = -R_po @ T_op
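# The columns of R_op (x_new, y_new, normal_vec) are the plane-frame axes expressed in
# world coordinates, so p_world = R_op @ p_plane + T_op and, inversely,
# p_plane = R_po @ p_world + T_po, which is what gets applied to the marker centers below.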
# As an example project all data points to the plane
tvecs3 = R_po @ tvecs # (3,N)
tvecs3 = T_po + tvecs3 # (3,N)
tvecs3 = tvecs3[0:2,:]
plt.figure()
plt.title("2D Data")
plt.xlabel("X")
plt.ylabel("Y")
plt.scatter(tvecs3[0,:], tvecs3[1,:], marker='x')
plt.axis('equal')
plt.grid()
plt.show()
return R_op, R_po, T_op, T_po, RMSE
def load_coefficients(path):
| """ Loads camera matrix and distortion coefficients. """
# FILE_STORAGE_READ
cv_file = cv2.FileStorage(path, cv2.FILE_STORAGE_READ)
# note we also have to specify the type to retrieve other wise we only get a
# FileNode object back instead of a matrix
camera_matrix = cv_file.getNode("K").mat()
dist_matrix = cv_file.getNode("D").mat()
try:
R_co = cv_file.getNode("R_co").mat()
R_oc = cv_file.getNode("R_oc").mat()
T_co = cv_file.getNode("T_co").mat()
T_oc = cv_file.getNode("T_oc").mat()
except:
print("[INFO]: could not read R_co, R_oc, T_co, T_oc from: {}".format(path))
print(str(R_co), str(R_oc), str(T_co), str(T_oc))
cv_file.release()
return [camera_matrix, dist_matrix]
| identifier_body |
|
04-aruco_calibration.py | # verify *at least* one ArUco marker was detected on floor
if len(corners_all) > 0:
rvecs = []
tvecs = []
# loop over the detected ArUCo corners and draw ids and bounding boxes around the detected markers
for (markerCorner, markerID, markerSize) in zip(corners_all, ids_all, sizes_all):
# extract the marker corners (which are always returned in
# top-left, top-right, bottom-right, and bottom-left order)
corners = markerCorner.reshape((4, 2))
(topLeft, topRight, bottomRight, bottomLeft) = corners
# convert each of the (x, y)-coordinate pairs to integers
topRight = (int(topRight[0]), int(topRight[1]))
bottomRight = (int(bottomRight[0]), int(bottomRight[1]))
bottomLeft = (int(bottomLeft[0]), int(bottomLeft[1]))
topLeft = (int(topLeft[0]), int(topLeft[1]))
# draw the bounding box of the ArUCo detection
cv2.line(frame, topLeft, topRight, (0, 255, 0), 1)
cv2.line(frame, topRight, bottomRight, (0, 255, 0), 1)
cv2.line(frame, bottomRight, bottomLeft, (0, 255, 0), 1)
cv2.line(frame, bottomLeft, topLeft, (0, 255, 0), 1)
# compute and draw the center (x, y)-coordinates of the ArUco
# marker
cX = int((topLeft[0] + bottomRight[0]) / 2.0)
cY = int((topLeft[1] + bottomRight[1]) / 2.0)
cv2.circle(frame, (cX, cY), 4, (0, 0, 255), -1)
# draw the ArUco marker ID on the image
cv2.putText(frame, str(markerID),
(topLeft[0], topLeft[1] - 15), cv2.FONT_HERSHEY_SIMPLEX,
0.5, (0, 255, 0), 2)
# Estimate the pose of the detected marker in camera frame
rvec, tvec, markerPoints = cv2.aruco.estimatePoseSingleMarkers(markerCorner, markerSize, mtx_new, dist_new)
cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, markerSize*0.75) # Draw Axis
# print("[INFO] ArUco marker ID: {}".format(markerID))
# print(tvec[0].flatten()) # in camera's frame)
# print(rvec[0].flatten()) # in camera's frame)
rvecs.append(rvec[0])
tvecs.append(tvec[0])
rvecs = np.array(rvecs)
tvecs = np.array(tvecs)
# # Estimate the pose of the detected marker in camera frame at once
# rvecs, tvecs, markerPoints = cv2.aruco.estimatePoseSingleMarkers(corners_all, 190, mtx_new, dist_new)
# cv2.aruco.drawDetectedMarkers(frame, corners_all, ids_all) # Draw Bounding boxes
# for (rvec, tvec) in zip(rvecs, tvecs):
# cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, 190*0.75) # Draw Axis
# show the output image
cv2.imshow("Image", frame)
cv2.waitKey(0)
# print(tvecs)
# print(type(tvec))
# print(type(tvecs))
# Transform detected locations from camera frame to World frame
tvecs = np.squeeze(tvecs).T # (3,N)
tvecs = R_oc @ tvecs # (3,N)
tvecs = T_oc + tvecs # (3,N)
# print(np.shape(tvecs))
# Calculate the best fitting plane
origin = np.mean(tvecs,axis=1).reshape(3,1)
tvecs2 = np.squeeze(tvecs - origin) # (3, N)
(U,S,Vt) = np.linalg.svd(tvecs2)
normal_vec = U[:,-1].reshape(3,1)
# Calculate residuals of the plane fitting
distances = normal_vec.T @ tvecs2
RMSE = math.sqrt(np.mean(np.square(distances)))
# print("RMSE: ", RMSE, " (mm)")
# Plot the data and fitting plane
# plot data
plt.figure()
ax = plt.subplot(111, projection='3d')
ax.scatter(tvecs[0,:], tvecs[1,:], tvecs[2,:], color='b')
# plot plane
xlim = ax.get_xlim()
ylim = ax.get_ylim()
X,Y = np.meshgrid(np.arange(xlim[0], xlim[1], step=500),
np.arange(ylim[0], ylim[1], step=500))
Z = -(normal_vec[0]/normal_vec[2])*(X-origin[0]) - (normal_vec[1]/normal_vec[2])*(Y-origin[1]) + origin[2]
ax.plot_wireframe(X,Y,Z, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
plt.show()
## Calculate the frame on the best fit plane based on world origin
# Project the world origin onto the best fit plane
origin_new = (normal_vec.T @ origin) * normal_vec
# Project the world frame x axis onto the best-fit plane
x = np.array([[1],[0],[0]])
x_p = x - (normal_vec.T @ (x - origin) ) * normal_vec
# calculate the corresponding x axis on the plane
x_new = (x_p - origin_new) / np.sqrt(np.sum((x_p - origin_new)**2))
# calculate the corresponding y axis on the plane
y_new = np.cross(np.squeeze(normal_vec), np.squeeze(x_new)).reshape(3,1)
# y_new = y_new / np.sqrt(np.sum(y_new**2)) # Normalization is not necessary since x_new and normal_vec are perpendicular
# Define the rotation matrix from world frame to new plane frame
R_op = np.concatenate((x_new, y_new,normal_vec), axis=1)
R_po = R_op.T
# Define the translation between world frame to plane frame
T_op = origin_new
T_po = -R_po @ T_op
# As an example project all data points to the plane
tvecs3 = R_po @ tvecs # (3,N)
tvecs3 = T_po + tvecs3 # (3,N)
tvecs3 = tvecs3[0:2,:]
plt.figure()
plt.title("2D Data")
plt.xlabel("X")
plt.ylabel("Y")
plt.scatter(tvecs3[0,:], tvecs3[1,:], marker='x')
plt.axis('equal')
plt.grid()
plt.show()
return R_op, R_po, T_op, T_po, RMSE
def load_coefficients(path):
""" Loads camera matrix and distortion coefficients. """
# FILE_STORAGE_READ
cv_file = cv2.FileStorage(path, cv2.FILE_STORAGE_READ)
# note we also have to specify the type to retrieve other wise we only get a
# FileNode object back instead of a matrix
camera_matrix = cv_file.getNode("K").mat()
dist_matrix = cv_file.getNode("D").mat()
try:
R_co = cv_file.getNode("R_co").mat()
R_oc = cv_file.getNode("R_oc").mat()
T_co = cv_file.getNode("T_co").mat()
T_oc = cv_file.getNode("T_oc").mat()
except:
print("[INFO]: could not read R_co, R_oc, T_co, T_oc from: {}".format(path))
print(str(R_co), str(R_oc), str(T_co), str(T_oc))
cv_file.release()
return [camera_matrix, dist_matrix]
cv_file.release()
return [camera_matrix, dist_matrix, R_co, R_oc, T_co, T_oc]
def capture_img(image_dir, image_name, image_format):
cam = cv2.VideoCapture(1)
cam.set(3,3840)
cam.set(4,2160)
# cam.set(3,640)
# cam.set(4,480)
print("Hit SPACE key to capture, Hit ESC key to continue")
img_name = image_dir + "/" + image_name + "." + image_format
cv2.namedWindow("test")
while True:
ret, frame = cam.read()
if not ret:
print("failed to grab frame")
break
cv2.imshow("test", frame)
k = cv2.waitKey(1)
if k%256 == 27:
# ESC pressed
| print("Escape hit, closing...")
img = cv2.imread(img_name)
break | conditional_block |
|
04-aruco_calibration.py | boxes around the detected markers
for (markerCorner, markerID, markerSize) in zip(corners_all, ids_all, sizes_all):
# extract the marker corners (which are always returned in
# top-left, top-right, bottom-right, and bottom-left order)
corners = markerCorner.reshape((4, 2))
(topLeft, topRight, bottomRight, bottomLeft) = corners
# convert each of the (x, y)-coordinate pairs to integers
topRight = (int(topRight[0]), int(topRight[1]))
bottomRight = (int(bottomRight[0]), int(bottomRight[1]))
bottomLeft = (int(bottomLeft[0]), int(bottomLeft[1]))
topLeft = (int(topLeft[0]), int(topLeft[1]))
# draw the bounding box of the ArUCo detection
cv2.line(frame, topLeft, topRight, (0, 255, 0), 1)
cv2.line(frame, topRight, bottomRight, (0, 255, 0), 1)
cv2.line(frame, bottomRight, bottomLeft, (0, 255, 0), 1)
cv2.line(frame, bottomLeft, topLeft, (0, 255, 0), 1)
# compute and draw the center (x, y)-coordinates of the ArUco
# marker
cX = int((topLeft[0] + bottomRight[0]) / 2.0)
cY = int((topLeft[1] + bottomRight[1]) / 2.0)
cv2.circle(frame, (cX, cY), 4, (0, 0, 255), -1)
# draw the ArUco marker ID on the image
cv2.putText(frame, str(markerID),
(topLeft[0], topLeft[1] - 15), cv2.FONT_HERSHEY_SIMPLEX,
0.5, (0, 255, 0), 2)
# Estimate the pose of the detected marker in camera frame
rvec, tvec, markerPoints = cv2.aruco.estimatePoseSingleMarkers(markerCorner, markerSize, mtx_new, dist_new)
cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, markerSize*0.75) # Draw Axis
# print("[INFO] ArUco marker ID: {}".format(markerID))
# print(tvec[0].flatten()) # in camera's frame)
# print(rvec[0].flatten()) # in camera's frame)
rvecs.append(rvec[0])
tvecs.append(tvec[0])
rvecs = np.array(rvecs)
tvecs = np.array(tvecs)
# # Estimate the pose of the detected marker in camera frame at once
# rvecs, tvecs, markerPoints = cv2.aruco.estimatePoseSingleMarkers(corners_all, 190, mtx_new, dist_new)
# cv2.aruco.drawDetectedMarkers(frame, corners_all, ids_all) # Draw Bounding boxes
# for (rvec, tvec) in zip(rvecs, tvecs):
# cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, 190*0.75) # Draw Axis
# show the output image
cv2.imshow("Image", frame)
cv2.waitKey(0)
# print(tvecs)
# print(type(tvec))
# print(type(tvecs))
# Transform detected locations from camera frame to World frame
tvecs = np.squeeze(tvecs).T # (3,N)
tvecs = R_oc @ tvecs # (3,N)
tvecs = T_oc + tvecs # (3,N)
# print(np.shape(tvecs))
# Calculate the best fitting plane
origin = np.mean(tvecs,axis=1).reshape(3,1)
tvecs2 = np.squeeze(tvecs - origin) # (3, N)
(U,S,Vt) = np.linalg.svd(tvecs2)
normal_vec = U[:,-1].reshape(3,1)
# Calculate residuals of the plane fitting
distances = normal_vec.T @ tvecs2
RMSE = math.sqrt(np.mean(np.square(distances)))
# print("RMSE: ", RMSE, " (mm)")
# Plot the data and fitting plane
# plot data
plt.figure()
ax = plt.subplot(111, projection='3d')
ax.scatter(tvecs[0,:], tvecs[1,:], tvecs[2,:], color='b')
# plot plane
xlim = ax.get_xlim()
ylim = ax.get_ylim()
X,Y = np.meshgrid(np.arange(xlim[0], xlim[1], step=500),
np.arange(ylim[0], ylim[1], step=500))
Z = -(normal_vec[0]/normal_vec[2])*(X-origin[0]) - (normal_vec[1]/normal_vec[2])*(Y-origin[1]) + origin[2]
ax.plot_wireframe(X,Y,Z, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
plt.show()
## Calculate the frame on the best fit plane based on world origin
# Project the world origin onto the best fit plane
origin_new = (normal_vec.T @ origin) * normal_vec
# Project the world frame x axis onto the best fit plane
x = np.array([[1],[0],[0]])
x_p = x - (normal_vec.T @ (x - origin) ) * normal_vec
# calculate the corresponding x axis on the plane
x_new = (x_p - origin_new) / np.sqrt(np.sum((x_p - origin_new)**2))
# calculate the corresponding y axis on the plane
y_new = np.cross(np.squeeze(normal_vec), np.squeeze(x_new)).reshape(3,1)
# y_new = y_new / np.sqrt(np.sum(y_new**2)) # Normalization is not necessary since x_new and normal_vec are perpendicular
# Define the rotation matrix from world frame to new plane frame
R_op = np.concatenate((x_new, y_new,normal_vec), axis=1)
R_po = R_op.T
# Define the translation between world frame to plane frame
T_op = origin_new
T_po = -R_po @ T_op
# As an example project all data points to the plane
tvecs3 = R_po @ tvecs # (3,N)
tvecs3 = T_po + tvecs3 # (3,N)
tvecs3 = tvecs3[0:2,:]
plt.figure()
plt.title("2D Data")
plt.xlabel("X")
plt.ylabel("Y")
plt.scatter(tvecs3[0,:], tvecs3[1,:], marker='x')
plt.axis('equal')
plt.grid()
plt.show()
return R_op, R_po, T_op, T_po, RMSE
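# --- Illustrative sketch (added; not part of the original script) ---
# The SVD plane fit and residual computation used in calibrate_aruco above,
# reproduced on synthetic points so the math is easy to follow. The function
# name, the noise model and the point count are all hypothetical.
def _plane_fit_demo():
    import numpy as np
    rng = np.random.RandomState(0)
    n_pts = 50
    # roughly planar synthetic points, shape (3, N), lying near the plane z = 5
    pts = np.vstack((rng.uniform(-1000.0, 1000.0, n_pts),
                     rng.uniform(-1000.0, 1000.0, n_pts),
                     5.0 + rng.normal(0.0, 2.0, n_pts)))
    origin = np.mean(pts, axis=1).reshape(3, 1)      # centroid of the cloud
    U, S, Vt = np.linalg.svd(pts - origin)           # columns of U span the cloud
    normal_vec = U[:, -1].reshape(3, 1)              # least-variance direction = plane normal
    distances = normal_vec.T @ (pts - origin)        # signed point-to-plane distances
    rmse = np.sqrt(np.mean(np.square(distances)))
    print("demo plane normal:", normal_vec.ravel(), "RMSE (mm):", rmse)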
def load_coefficients(path):
""" Loads camera matrix and distortion coefficients. """
# FILE_STORAGE_READ
cv_file = cv2.FileStorage(path, cv2.FILE_STORAGE_READ)
# note we also have to specify the type to retrieve, otherwise we only get a
# FileNode object back instead of a matrix
camera_matrix = cv_file.getNode("K").mat()
dist_matrix = cv_file.getNode("D").mat()
try:
R_co = cv_file.getNode("R_co").mat()
R_oc = cv_file.getNode("R_oc").mat()
T_co = cv_file.getNode("T_co").mat()
T_oc = cv_file.getNode("T_oc").mat()
except:
print("[INFO]: could not read R_co, R_oc, T_co, T_oc from: {}".format(path))
print(str(R_co), str(R_oc), str(T_co), str(T_oc))
cv_file.release()
return [camera_matrix, dist_matrix]
cv_file.release()
return [camera_matrix, dist_matrix, R_co, R_oc, T_co, T_oc]
def capture_img(image_dir, image_name, image_format):
cam = cv2.VideoCapture(1)
cam.set(3,3840)
cam.set(4,2160)
# cam.set(3,640)
# cam.set(4,480)
print("Hit SPACE key to capture, Hit ESC key to continue")
img_name = image_dir + "/" + image_name + "." + image_format
cv2.namedWindow("test")
while True:
ret, frame = cam.read()
if not ret:
print("failed to grab frame")
break
cv2.imshow("test", frame)
k = cv2.waitKey(1)
if k%256 == 27:
# ESC pressed
print("Escape hit, closing...")
img = cv2.imread(img_name)
break
elif k%256 == 32:
# SPACE pressed
cv2.imwrite(img_name, frame)
print("{} written!".format(img_name))
cam.release()
cv2.destroyAllWindows()
return img
def | save_coefficients | identifier_name |
|
04-aruco_calibration.py | uco.DICT_APRILTAG_36h10,
"DICT_APRILTAG_36h11": cv2.aruco.DICT_APRILTAG_36h11
}
def calibrate_aruco(intrinsic_calib_path, intrinsic_calib_path_undistorted, image_dir, image_name, image_format, aruco_tags_info_path):
[mtx, dist, R_co, R_oc, T_co, T_oc] = load_coefficients(intrinsic_calib_path)
[mtx_new, dist_new, R_co, R_oc, T_co, T_oc] = load_coefficients(intrinsic_calib_path_undistorted) |
frame = capture_img(image_dir, image_name, image_format)
# try undistorted image
# h, w = frame.shape[:2]
# newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w,h), 1, (w,h))
# undistort
# # frame = cv2.undistort(frame, mtx, dist, None, newcameramtx)
frame = cv2.undistort(frame, mtx, dist, None, mtx)
# # # crop the image
# # x, y, w, h = roi
# # frame = frame[y:y+h, x:x+w]
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# read csv file that includes places of the aruco tags, their aruco type, ids, sizes and locations wrt their places
df = pd.read_csv(aruco_tags_info_path)
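# For reference, a hypothetical aruco_tags_info.csv could look like the rows below;
# the column names match the ones queried in this function, the values are made up:
#   place,aruco_type,id,size_mm
#   floor,DICT_APRILTAG_36h11,7,190
#   floor,DICT_APRILTAG_36h11,8,190
#   wall,DICT_APRILTAG_36h10,12,100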
# get aruco dictionary types on the floor as a list
aruco_types = list(df[df["place"]=="floor"]["aruco_type"].unique())
corners_all = []
ids_all = []
sizes_all = []
for aruco_type in aruco_types:
arucoType = ARUCO_DICT[aruco_type]
# verify that the supplied ArUCo tag exists and is supported by OpenCV
if ARUCO_DICT.get(aruco_type, None) is None:
print("[ERROR] ArUCo tag of '{}' is not supported".format(aruco_type))
sys.exit(0)
# load the ArUCo dictionary, grab the ArUCo parameters, and detect the markers
print("[INFO] detecting '{}' tags...".format(aruco_type))
arucoDict = cv2.aruco.Dictionary_get(ARUCO_DICT[aruco_type])
arucoParams = cv2.aruco.DetectorParameters_create()
(corners, ids, rejected) = cv2.aruco.detectMarkers(frame, arucoDict, parameters=arucoParams)
# only keep the detections that are on the floor by looking at the IDs
ids_on_floor_with_current_aruco_type = list(df[(df["place"]=="floor") & (df["aruco_type"]==aruco_type)]["id"])
if len(corners) > 0: # verify *at least* one ArUco marker was detected
for i, markerID in enumerate(list(ids.flatten())): # loop over the detected ArUCo corners
if markerID in ids_on_floor_with_current_aruco_type:
corners_all.append(corners[i])
ids_all.append(markerID)
markerSize = float(df[(df["place"]=="floor") & (df["aruco_type"]==aruco_type) & (df["id"]==markerID)]["size_mm"])
sizes_all.append(markerSize)
# print(corners_all)
# print(ids_all)
# print(sizes_all)
print("[INFO] Num of detected Tags: ",len(corners_all))
corners_all = np.array(corners_all)
ids_all = np.array(ids_all)
sizes_all = np.array(sizes_all)
# verify *at least* one ArUco marker was detected on floor
if len(corners_all) > 0:
rvecs = []
tvecs = []
# loop over the detected ArUCo corners and draw ids and bounding boxes around the detected markers
for (markerCorner, markerID, markerSize) in zip(corners_all, ids_all, sizes_all):
# extract the marker corners (which are always returned in
# top-left, top-right, bottom-right, and bottom-left order)
corners = markerCorner.reshape((4, 2))
(topLeft, topRight, bottomRight, bottomLeft) = corners
# convert each of the (x, y)-coordinate pairs to integers
topRight = (int(topRight[0]), int(topRight[1]))
bottomRight = (int(bottomRight[0]), int(bottomRight[1]))
bottomLeft = (int(bottomLeft[0]), int(bottomLeft[1]))
topLeft = (int(topLeft[0]), int(topLeft[1]))
# draw the bounding box of the ArUCo detection
cv2.line(frame, topLeft, topRight, (0, 255, 0), 1)
cv2.line(frame, topRight, bottomRight, (0, 255, 0), 1)
cv2.line(frame, bottomRight, bottomLeft, (0, 255, 0), 1)
cv2.line(frame, bottomLeft, topLeft, (0, 255, 0), 1)
# compute and draw the center (x, y)-coordinates of the ArUco
# marker
cX = int((topLeft[0] + bottomRight[0]) / 2.0)
cY = int((topLeft[1] + bottomRight[1]) / 2.0)
cv2.circle(frame, (cX, cY), 4, (0, 0, 255), -1)
# draw the ArUco marker ID on the image
cv2.putText(frame, str(markerID),
(topLeft[0], topLeft[1] - 15), cv2.FONT_HERSHEY_SIMPLEX,
0.5, (0, 255, 0), 2)
# Estimate the pose of the detected marker in camera frame
rvec, tvec, markerPoints = cv2.aruco.estimatePoseSingleMarkers(markerCorner, markerSize, mtx_new, dist_new)
cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, markerSize*0.75) # Draw Axis
# print("[INFO] ArUco marker ID: {}".format(markerID))
# print(tvec[0].flatten()) # in camera's frame)
# print(rvec[0].flatten()) # in camera's frame)
rvecs.append(rvec[0])
tvecs.append(tvec[0])
rvecs = np.array(rvecs)
tvecs = np.array(tvecs)
# # Estimate the pose of the detected marker in camera frame at once
# rvecs, tvecs, markerPoints = cv2.aruco.estimatePoseSingleMarkers(corners_all, 190, mtx_new, dist_new)
# cv2.aruco.drawDetectedMarkers(frame, corners_all, ids_all) # Draw Bounding boxes
# for (rvec, tvec) in zip(rvecs, tvecs):
# cv2.aruco.drawAxis(frame, mtx_new, dist_new, rvec, tvec, 190*0.75) # Draw Axis
# show the output image
cv2.imshow("Image", frame)
cv2.waitKey(0)
# print(tvecs)
# print(type(tvec))
# print(type(tvecs))
# Transform detected locations from camera frame to World frame
tvecs = np.squeeze(tvecs).T # (3,N)
tvecs = R_oc @ tvecs # (3,N)
tvecs = T_oc + tvecs # (3,N)
# print(np.shape(tvecs))
# Calculate the best fitting plane
origin = np.mean(tvecs,axis=1).reshape(3,1)
tvecs2 = np.squeeze(tvecs - origin) # (3, N)
(U,S,Vt) = np.linalg.svd(tvecs2)
normal_vec = U[:,-1].reshape(3,1)
# Calculate residuals of the plane fitting
distances = normal_vec.T @ tvecs2
RMSE = math.sqrt(np.mean(np.square(distances)))
# print("RMSE: ", RMSE, " (mm)")
# Plot the data and fitting plane
# plot data
plt.figure()
ax = plt.subplot(111, projection='3d')
ax.scatter(tvecs[0,:], tvecs[1,:], tvecs[2,:], color='b')
# plot plane
xlim = ax.get_xlim()
ylim = ax.get_ylim()
X,Y = np.meshgrid(np.arange(xlim[0], xlim[1], step=500),
np.arange(ylim[0], ylim[1], step=500))
Z = -(normal_vec[0]/normal_vec[2])*(X-origin[0]) - (normal_vec[1]/normal_vec[2])*(Y-origin[1]) + origin[2]
ax.plot_wireframe(X,Y,Z, color='k')
ax.set_xlabel('x')
ax.set_ylabel('y | random_line_split |
|
huff_algorithm.py | (self):
if (self.lvl ==None):
return "\t%.3d=%c id:%0.3d : %.3f\r\n" % (self.symbol, self.symbol,\
self.id,self.prob)
else :
return "\t%.3d=%c id:%0.3d : %.3f (%d)\r\n" % (self.symbol,self.symbol,\
self.id,self.prob, self.lvl)
# @brief Binary tree leaf constructor specific class prototype contains symbol_val field and does
# not need any fields for descending nodes (dead end, None on default)
# @param Symbol value -> character which is represented by this node
# @param Symbol probability -> how many times the specific symbol occurred in the source data.
# @param Node level -> designated during BT generation (None on default).
class TBinTree_NodeGenerator:
# @brief
# @param Symbol value -> character which is represented by this node
def __init__(self, filedata): #constructor which accepts string as arg and uses it to generate dictionary to store input source data (arg -> filedata string
self.S_LOW_VALUE=32 #space
self.S_HIGH_VALUE=125 # }- character
self.pPopulation = 0 #total character count
#create empty list as property pBTLeafList -> for fresh data
self.pBTLeafList = list()
#create empty list as property pSymbolsList_sorted -> for sorted fresh data
self.pSymbolsList_sorted = list()
#create empty dictionary as property pSymbolsDict
self.pSymbolsDict = dict()
#fill list with symbol data (character with their probability)
for ascii_code in range(self.S_LOW_VALUE, self.S_HIGH_VALUE+1): # for every ASCII code from LOW_VALUE to HIGH_VALUE
x = filedata.count(chr(ascii_code)) # count number of characters in string
if (x>0) : # if character appears in string at least once
self.pSymbolsDict.update({ascii_code:x}) # put it into dictionary with ASCII code as key and no. of appearances as value
self.pPopulation +=x # all counted symbols are added to total source data population
self.pListString = str()
def SortData(self): # creates list by sorting symbols along probability
for key, value in sorted(self.pSymbolsDict.iteritems(), key=lambda (k,v): (v,k),reverse=False):
self.pSymbolsList_sorted.append((key,value))
def Pop2Prob(self): # takes total population into consideration and translates it into probability
for x in range(0,len(self.pSymbolsList_sorted)):
tempItem = list(self.pSymbolsList_sorted.pop(x))
tempItem[1]=float(tempItem[1])/self.pPopulation
print tempItem
self.pSymbolsList_sorted.insert(x,tempItem)
#self.pSymbolsList_sorted[x]=self.pSymbolsList_sorted[x]/self.pPopulation
print self.pSymbolsList_sorted
def setNewSymbolsDict(self, newDict):
self.pSymbolsDict = newDict
def SortedLeafGen(self):
for leaf_no in range(0, len(self.pSymbolsList_sorted)):
self.pBTLeafList.append(TBinTree_Leaf(self.pSymbolsList_sorted[leaf_no][0],self.pSymbolsList_sorted[leaf_no][1]))
self.LeafPopulation = len(self.pBTLeafList)
def DictPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for key in range(self.S_LOW_VALUE, self.S_HIGH_VALUE+1):
if (self.pSymbolsDict.has_key(key)):
print "%.3d='%c' -> %.3f " % (key, key, self.pSymbolsDict[key], \
float(self.pSymbolsDict[key])/self.pPopulation )
def ListPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for i in range(0,len(self.pSymbolsList_sorted)):
print("%.3d=%c : %.3f" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
self.pListString = self.pListString + ("%.3d='%c' : %.3f\n" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
def GetPopulation(self):
return self.pPopulation
def GetSortedList(self):
return self.pSymbolsList_sorted
def GetNodeList(self):
return self.pBTLeafList
def GetSourceEntropy(self):
Entropy = 0
temp = 0
for x in range(0,len(self.pSymbolsList_sorted)):
temp = self.pSymbolsList_sorted[x][1]
Entropy+= temp*np.log2(1.0/temp)
print temp
print "Source entropy:\t%f" % (Entropy)
return Entropy
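# Added usage sketch (hypothetical, not from the original file): the intended
# pipeline for TBinTree_NodeGenerator together with TBinTree_Tree defined below,
# assuming `data` holds the source text to be coded.
#   generator = TBinTree_NodeGenerator(data)
#   generator.SortData()        # sort symbols by how often they occur
#   generator.Pop2Prob()        # turn counts into probabilities
#   generator.SortedLeafGen()   # build the sorted leaf list
#   tree = TBinTree_Tree(generator)
#   tree()                      # build the source tree (top-down by default)
#   tree.CodingListGenerator()  # fill the global coding dictionary
#   coded = tree.CodeMessage(data)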
class TBinTree_Tree:
def __init__(self, Leaves):
self.pPopulation = Leaves.GetPopulation()
self.LeavesList = Leaves.GetNodeList()
self.All_leaves = list()
self.All_edges = list()
self.CodeList = dict()
def __call__(self,option=None): #by default top-down method of defining the binary tree; if a parameter is present - bottom-up
if (option==None):
self.root = fBinaryTreeBuilder(self.LeavesList, 0)
else :
self.root = fBinaryTreeBuilderBU(self.LeavesList, 0)
self.AvgCL = fMECalc(self.root)
def ShowBT(self):
fBinaryTreeNodeFinder(self.root)
def GraphGen(self):
#global graphen
print "Starting Source tree graph generation..."
fBinaryTreeNodeCartograph(self.root)
BTGraph.write_png('SourceTree.png')
print "Graph complete."
def CodingListGenerator(self):
global gCodingDict
print "Generating symbol coding list..."
fBinaryTreeNodeFinder(self.root,Action=1)
# print dictionary with symbol coding
dictKeys = gCodingDict.keys()
for x in range(0,len(gCodingDict)):
print "%02d)\'%c\' -> %s"% (x,dictKeys[x],gCodingDict[dictKeys[x]])
def CodeMessage(self,MessageContent, Action=None):
global gCodingDict
if (Action==None):
codedMsg = ""
print len(MessageContent)
for x in range(0,len(MessageContent)): #TODO try KeyError -> dictionary unspecified key handling - done
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg = codedMsg + gCodingDict[ord(MessageContent[x])] + "|"
return codedMsg
elif (Action==1):
codedMsg = list()
for x in range(0,len(MessageContent)):
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg.append(gCodingDict[ord(MessageContent[x])])
return codedMsg
def GetAvgCL(self):
return self.AvgCL
# @brief Decoding function as a method of Binary tree class which cooperates with the independent
# fDecodeBit function which is repeatedly called until the end of the coded message.
# Global variable which temporarily stores the string is used.
# @param Encoded as a string.
def DecodeMessage(self,MessageContent):
print "Commencing message decoding..."
global gTempCodedMsg
gTempCodedMsg = "".join(MessageContent) #copy coded message string into global variable string for further manipulations
gTempCodedMsg = gTempCodedMsg.replace("|","")
#print gTempCodedMsg
tempString = ""
while (len(gTempCodedMsg)): #while there are bits of the coded message available run decoding of consecutive symbols in loop
symbol = fDecodeBit(self.root, gTempCodedMsg)
print symbol
tempString = tempString + chr(symbol) # concatenate character symbol to string
return tempString
print
global gTempCodedMsg
gTempCodedMsg = str()
# @brief Independent function which recursively calls itself to explore the binary tree until a Leaf is reached
# and the proper symbol retrieved
# @param Current - starting point for exploring.
# @param Piece of encoded message - the compass to navigate between branches.
def fDecodeBit(CurrentNode,CodedMsg):
global gTempCodedMsg
gTempCodedMsg = CodedMsg
if (CurrentNode.__class__.__name__ == 'TBinTree_Leaf'):
return CurrentNode.symbol
elif (CurrentNode.__class__.__name__ == 'TBinTree_Node'):
#if (len(CodedMsg)):
#print CodedMsg[0]
if (CodedMsg[0]=='0') :
return fDecodeBit(CurrentNode.b_zero, CodedMsg[1:])
elif (CodedMsg[0]=='1') :
return fDecodeBit(CurrentNode.b_one, CodedMsg[1:])
else :
print "DecodeError 2 Message!"
else:
print "DecodeError 1 Message!"
# @brief Independent function with implementation of suboptimal top-down method of source tree generation
# algorithm
# @param List of BTLeaves representing symbols sorted along probability
# @param Current level - codelength
def fBinaryTreeBuilder(LeavesList,Level) : # top-down method | __repr__ | identifier_name |
|
huff_algorithm.py | Dict = newDict
def SortedLeafGen(self):
for leaf_no in range(0, len(self.pSymbolsList_sorted)):
self.pBTLeafList.append(TBinTree_Leaf(self.pSymbolsList_sorted[leaf_no][0],self.pSymbolsList_sorted[leaf_no][1]))
self.LeafPopulation = len(self.pBTLeafList)
def DictPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for key in range(self.S_LOW_VALUE, self.S_HIGH_VALUE+1):
if (self.pSymbolsDict.has_key(key)):
print "%.3d='%c' -> %.3f " % (key, key, self.pSymbolsDict[key], \
float(self.pSymbolsDict[key])/self.pPopulation )
def ListPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for i in range(0,len(self.pSymbolsList_sorted)):
print("%.3d=%c : %.3f" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
self.pListString = self.pListString + ("%.3d='%c' : %.3f\n" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
def GetPopulation(self):
return self.pPopulation
def GetSortedList(self):
return self.pSymbolsList_sorted
def GetNodeList(self):
return self.pBTLeafList
def GetSourceEntropy(self):
Entropy = 0
temp = 0
for x in range(0,len(self.pSymbolsList_sorted)):
temp = self.pSymbolsList_sorted[x][1]
Entropy+= temp*np.log2(1.0/temp)
print temp
print "Source entropy:\t%f" % (Entropy)
return Entropy
class TBinTree_Tree:
def __init__(self, Leaves):
self.pPopulation = Leaves.GetPopulation()
self.LeavesList = Leaves.GetNodeList()
self.All_leaves = list()
self.All_edges = list()
self.CodeList = dict()
def __call__(self,option=None): #by default top-down method of defining the binary tree; if a parameter is present - bottom-up
if (option==None):
self.root = fBinaryTreeBuilder(self.LeavesList, 0)
else :
self.root = fBinaryTreeBuilderBU(self.LeavesList, 0)
self.AvgCL = fMECalc(self.root)
def ShowBT(self):
fBinaryTreeNodeFinder(self.root)
def GraphGen(self):
#global graphen
print "Starting Source tree graph generation..."
fBinaryTreeNodeCartograph(self.root)
BTGraph.write_png('SourceTree.png')
print "Graph complete."
def CodingListGenerator(self):
global gCodingDict
print "Generating symbol coding list..."
fBinaryTreeNodeFinder(self.root,Action=1)
# print dictionary with symbol coding
dictKeys = gCodingDict.keys()
for x in range(0,len(gCodingDict)):
print "%02d)\'%c\' -> %s"% (x,dictKeys[x],gCodingDict[dictKeys[x]])
def CodeMessage(self,MessageContent, Action=None):
global gCodingDict
if (Action==None):
codedMsg = ""
print len(MessageContent)
for x in range(0,len(MessageContent)): #TODO try KeyError -> dictionary unspecified key handling - done
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg = codedMsg + gCodingDict[ord(MessageContent[x])] + "|"
return codedMsg
elif (Action==1):
codedMsg = list()
for x in range(0,len(MessageContent)):
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg.append(gCodingDict[ord(MessageContent[x])])
return codedMsg
def GetAvgCL(self):
return self.AvgCL
# @brief Decoding function as a method of Binary tree class which cooperates with the independent
# fDecodeBit function which is repeatedly called until the end of the coded message.
# Global variable which temporarily stores the string is used.
# @param Encoded as a string.
def DecodeMessage(self,MessageContent):
print "Commencing message decoding..."
global gTempCodedMsg
gTempCodedMsg = "".join(MessageContent) #copy coded message string into global variable string for further manipulations
gTempCodedMsg = gTempCodedMsg.replace("|","")
#print gTempCodedMsg
tempString = ""
while (len(gTempCodedMsg)): #while there are bits of the coded message available run decoding of consecutive symbols in loop
symbol = fDecodeBit(self.root, gTempCodedMsg)
print symbol
tempString = tempString + chr(symbol) # concatenate character symbol to string
return tempString
print
global gTempCodedMsg
gTempCodedMsg = str()
# @brief Independent function which recursively calls itself to explore the binary tree until a Leaf is reached
# and the proper symbol retrieved
# @param Current - starting point for exploring.
# @param Piece of encoded message - the compass to navigate between branches.
def fDecodeBit(CurrentNode,CodedMsg):
global gTempCodedMsg
gTempCodedMsg = CodedMsg
if (CurrentNode.__class__.__name__ == 'TBinTree_Leaf'):
return CurrentNode.symbol
elif (CurrentNode.__class__.__name__ == 'TBinTree_Node'):
#if (len(CodedMsg)):
#print CodedMsg[0]
if (CodedMsg[0]=='0') :
return fDecodeBit(CurrentNode.b_zero, CodedMsg[1:])
elif (CodedMsg[0]=='1') :
return fDecodeBit(CurrentNode.b_one, CodedMsg[1:])
else :
print "DecodeError 2 Message!"
else:
print "DecodeError 1 Message!"
# @brief Independent function with implementation of suboptimal top-down method of source tree generation
# algorithm
# @param List of BTLeaves representing symbols sorted along probability
# @param Current level - codelength
def fBinaryTreeBuilder(LeavesList,Level) : # top-down method
print "Generating Soure tree implementing top-down algorithm."
leaves_in_list = len(LeavesList)
Population=0
print LeavesList
for i in LeavesList[:] :
Population+=i.prob
print("\tcounted pop=%d,nodes=%d\n" % (Population, leaves_in_list))
if ( leaves_in_list < 3 ) :
if (leaves_in_list == 0) :
print "ERROR!->leaf node empty"
return None
elif (leaves_in_list == 1) :
total_leaf_prob = LeavesList[0].prob
LeavesList[0].UpdateLvl(Level)
return LeavesList[0]
elif (leaves_in_list == 2) :
total_leaf_prob = LeavesList[0].prob + LeavesList[1].prob
LeavesList[0].UpdateLvl(Level+1)
LeavesList[1].UpdateLvl(Level+1)
NewBTNode = TBinTree_Node(total_leaf_prob,LeavesList[0], LeavesList[1], Level)
return NewBTNode
else :
tempPopulation = float(1.0)#Population
index = 0
prob_sum = 0
while ( 1 ) :
prob_sum = prob_sum + LeavesList[index].prob
if ( (prob_sum<(0.5*Population)) and (index<(leaves_in_list)-1) ) :
index = index + 1
else :
break
BinaryNodeZero = fBinaryTreeBuilder(LeavesList[:index], Level+1)
BinaryNodeOne = fBinaryTreeBuilder(LeavesList[index:], Level+1)
return TBinTree_Node( Population, BinaryNodeZero, BinaryNodeOne, Level)
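# Added illustrative sketch (not from the original source): the split rule that
# fBinaryTreeBuilder applies above, extracted into a stand-alone helper so it can
# be tried on a toy probability list. The helper name is hypothetical.
def _split_index_demo(probs):
    # probs: probabilities in the same order fBinaryTreeBuilder receives its leaves;
    # returns the index at which the list is cut into the "0" and "1" subtrees
    total = float(sum(probs))
    prob_sum = 0.0
    index = 0
    while True:
        prob_sum = prob_sum + probs[index]
        if prob_sum < 0.5 * total and index < len(probs) - 1:
            index = index + 1
        else:
            break
    return index  # caller recurses on probs[:index] and probs[index:]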
# @brief Independent function with more efficient implementation of bottom-up method of source tree generation
# algorithm.
# @param List of BTLeaves representing symbols sorted along probability
# @param Current level - codelength
def fBinaryTreeBuilderBU(LeavesList,Level) : # bottom-up method
| print "Generating Soure tree implementing bottom-up algorithm."
print LeavesList.__class__.__name__
print LeavesList
while (len(LeavesList)>1):
for leafIndex in range(0,len(LeavesList)-1) :
if (LeavesList[leafIndex].prob>LeavesList[leafIndex+1].prob):
temp = LeavesList.pop(leafIndex)
LeavesList.insert(leafIndex+1, temp)
newNode = TBinTree_Node(LeavesList[0].prob+LeavesList[1].prob, LeavesList[0], LeavesList[1], 0)
LeavesList.insert(0, newNode)
LeavesList.pop(1)
LeavesList.pop(1)
fLvlUpdate(LeavesList[0],0)
return LeavesList[0] | identifier_body |
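# Added illustrative sketch (not part of the original file): the textbook bottom-up
# Huffman construction that fBinaryTreeBuilderBU approximates, written with heapq
# instead of re-sorting the node list on every merge. All names are hypothetical.
import heapq

def _huffman_code_lengths(probs):
    # probs: list of (symbol, probability) pairs; returns {symbol: code length in bits}
    lengths = dict((sym, 0) for sym, _ in probs)
    heap = [(p, i, [sym]) for i, (sym, p) in enumerate(probs)]  # index breaks ties
    heapq.heapify(heap)
    while len(heap) > 1:
        p1, _, syms1 = heapq.heappop(heap)
        p2, i2, syms2 = heapq.heappop(heap)
        for s in syms1 + syms2:
            lengths[s] = lengths[s] + 1  # each merge pushes the merged symbols one level deeper
        heapq.heappush(heap, (p1 + p2, i2, syms1 + syms2))
    return lengths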
|
huff_algorithm.py | )\r\n" % (self.symbol,self.symbol,\
self.id,self.prob, self.lvl)
# @brief Binary tree leaf constructor specific class prototype contains symbol_val field and does
# not need any fields for descending nodes (dead end, None on default)
# @param Symbol value -> character which is represented by this node
# @param Symbol probability -> how many times the specific symbol occurred in the source data.
# @param Node level -> designated during BT generation (None on default).
class TBinTree_NodeGenerator:
# @brief
# @param Symbol value -> character which is represented by this node
def __init__(self, filedata): #constructor which accepts string as arg and uses it to generate dictionary to store input source data (arg -> filedata string
self.S_LOW_VALUE=32 #space
self.S_HIGH_VALUE=125 # }- character
self.pPopulation = 0 #total character count
#create empty list as property pBTLeafList -> for fresh data
self.pBTLeafList = list()
#create empty list as property pSymbolsList_sorted -> for sorted fresh data
self.pSymbolsList_sorted = list()
#create empty dictionary as property pSymbolsDict
self.pSymbolsDict = dict()
#fill list with symbol data (character with their probability)
for ascii_code in range(self.S_LOW_VALUE, self.S_HIGH_VALUE+1): # for every ASCII code from LOW_VALUE to HIGH_VALUE
x = filedata.count(chr(ascii_code)) # count number of characters in string
if (x>0) : # if character appears in string at least once
self.pSymbolsDict.update({ascii_code:x}) # put it into dictionary with ASCII code as key and no. of appearances as value
self.pPopulation +=x # all counted symbols are added to total source data population
self.pListString = str()
def SortData(self): # creates list by sorting symbols along probability |
def Pop2Prob(self): # takes total population into consideration and translates it into probability
for x in range(0,len(self.pSymbolsList_sorted)):
tempItem = list(self.pSymbolsList_sorted.pop(x))
tempItem[1]=float(tempItem[1])/self.pPopulation
print tempItem
self.pSymbolsList_sorted.insert(x,tempItem)
#self.pSymbolsList_sorted[x]=self.pSymbolsList_sorted[x]/self.pPopulation
print self.pSymbolsList_sorted
def setNewSymbolsDict(self, newDict):
self.pSymbolsDict = newDict
def SortedLeafGen(self):
for leaf_no in range(0, len(self.pSymbolsList_sorted)):
self.pBTLeafList.append(TBinTree_Leaf(self.pSymbolsList_sorted[leaf_no][0],self.pSymbolsList_sorted[leaf_no][1]))
self.LeafPopulation = len(self.pBTLeafList)
def DictPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for key in range(self.S_LOW_VALUE, self.S_HIGH_VALUE+1):
if (self.pSymbolsDict.has_key(key)):
print "%.3d='%c' -> %.3f " % (key, key, self.pSymbolsDict[key], \
float(self.pSymbolsDict[key])/self.pPopulation )
def ListPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for i in range(0,len(self.pSymbolsList_sorted)):
print("%.3d=%c : %.3f" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
self.pListString = self.pListString + ("%.3d='%c' : %.3f\n" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
def GetPopulation(self):
return self.pPopulation
def GetSortedList(self):
return self.pSymbolsList_sorted
def GetNodeList(self):
return self.pBTLeafList
def GetSourceEntropy(self):
Entropy = 0
temp = 0
for x in range(0,len(self.pSymbolsList_sorted)):
temp = self.pSymbolsList_sorted[x][1]
Entropy+= temp*np.log2(1.0/temp)
print temp
print "Source entropy:\t%f" % (Entropy)
return Entropy
class TBinTree_Tree:
def __init__(self, Leaves):
self.pPopulation = Leaves.GetPopulation()
self.LeavesList = Leaves.GetNodeList()
self.All_leaves = list()
self.All_edges = list()
self.CodeList = dict()
def __call__(self,option=None): #by default top-down method of defining the binary tree; if a parameter is present - bottom-up
if (option==None):
self.root = fBinaryTreeBuilder(self.LeavesList, 0)
else :
self.root = fBinaryTreeBuilderBU(self.LeavesList, 0)
self.AvgCL = fMECalc(self.root)
def ShowBT(self):
fBinaryTreeNodeFinder(self.root)
def GraphGen(self):
#global graphen
print "Starting Source tree graph generation..."
fBinaryTreeNodeCartograph(self.root)
BTGraph.write_png('SourceTree.png')
print "Graph complete."
def CodingListGenerator(self):
global gCodingDict
print "Generating symbol coding list..."
fBinaryTreeNodeFinder(self.root,Action=1)
# print dictionary with symbol coding
dictKeys = gCodingDict.keys()
for x in range(0,len(gCodingDict)):
print "%02d)\'%c\' -> %s"% (x,dictKeys[x],gCodingDict[dictKeys[x]])
def CodeMessage(self,MessageContent, Action=None):
global gCodingDict
if (Action==None):
codedMsg = ""
print len(MessageContent)
for x in range(0,len(MessageContent)): #TODO try KeyError -> dictionary unspecified key handling - done
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg = codedMsg + gCodingDict[ord(MessageContent[x])] + "|"
return codedMsg
elif (Action==1):
codedMsg = list()
for x in range(0,len(MessageContent)):
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg.append(gCodingDict[ord(MessageContent[x])])
return codedMsg
def GetAvgCL(self):
return self.AvgCL
# @brief Decoding function as a method of Binary tree class which cooperates with the independent
# fDecodeBit function which is repeatedly called until the end of the coded message.
# Global variable which temporarily stores the string is used.
# @param Encoded as a string.
def DecodeMessage(self,MessageContent):
print "Commencing message decoding..."
global gTempCodedMsg
gTempCodedMsg = "".join(MessageContent) #copy coded message string into global variable string for further manipulations
gTempCodedMsg = gTempCodedMsg.replace("|","")
#print gTempCodedMsg
tempString = ""
while (len(gTempCodedMsg)): #while there are bits of the coded message available run decoding of consecutive symbols in loop
symbol = fDecodeBit(self.root, gTempCodedMsg)
print symbol
tempString = tempString + chr(symbol) # concatenate character symbol to string
return tempString
print
global gTempCodedMsg
gTempCodedMsg = str()
# @brief Independent function which recursively calls itself to explore the binary tree until a Leaf is reached
# and the proper symbol retrieved
# @param Current - starting point for exploring.
# @param Piece of encoded message - the compass to navigate between branches.
def fDecodeBit(CurrentNode,CodedMsg):
global gTempCodedMsg
gTempCodedMsg = CodedMsg
if (CurrentNode.__class__.__name__ == 'TBinTree_Leaf'):
return CurrentNode.symbol
elif (CurrentNode.__class__.__name__ == 'TBinTree_Node'):
#if (len(CodedMsg)):
#print CodedMsg[0]
if (CodedMsg[0]=='0') :
return fDecodeBit(CurrentNode.b_zero, CodedMsg[1:])
elif (CodedMsg[0]=='1') :
return fDecodeBit(CurrentNode.b_one, CodedMsg[1:])
else :
print "DecodeError 2 Message!"
else:
print "DecodeError 1 Message!"
# @brief Independent function with implementation of suboptimal top-down method of source tree generation
# algorithm
# @param List of BTLeaves representing symbols sorted along probability
# @param Current level - codelength
def fBinaryTreeBuilder(LeavesList,Level) : # top-down method
print "Generating Soure tree implementing top-down algorithm."
leaves_in_list = len(LeavesList)
Population=0
print LeavesList
for i in LeavesList[:] :
Population+=i.prob
print("\tcounted pop=%d,nodes=%d\n" % (Population, leaves_in_list))
if ( | for key, value in sorted(self.pSymbolsDict.iteritems(), key=lambda (k,v): (v,k),reverse=False):
self.pSymbolsList_sorted.append((key,value)) | random_line_split |
huff_algorithm.py | r\n" % (self.symbol,self.symbol,\
self.id,self.prob, self.lvl)
# @brief Binary tree leaf constructor specific class prototype contains symbol_val field and does
# not need any fields for descending nodes (dead end, None on default)
# @param Symbol value -> character which is represented by this node
# @param Symbol probability -> how many times the specific symbol occurred in the source data.
# @param Node level -> designated during BT generation (None on default).
class TBinTree_NodeGenerator:
# @brief
# @param Symbol value -> character which is represented by this node
def __init__(self, filedata): #constructor which accepts string as arg and uses it to generate dictionary to store input source data (arg -> filedata string
self.S_LOW_VALUE=32 #space
self.S_HIGH_VALUE=125 # }- character
self.pPopulation = 0 #total character count
#create empty list as property pBTLeafList -> for fresh data
self.pBTLeafList = list()
#create empty list as property pSymbolsList_sorted -> for sorted fresh data
self.pSymbolsList_sorted = list()
#create empty dictionary as property pSymbolsDict
self.pSymbolsDict = dict()
#fill list with symbol data (character with their probability)
for ascii_code in range(self.S_LOW_VALUE, self.S_HIGH_VALUE+1): # for every ASCII code from LOW_VALUE to HIGH_VALUE
x = filedata.count(chr(ascii_code)) # count number of characters in string
if (x>0) : # if character appears in string at least once
self.pSymbolsDict.update({ascii_code:x}) # put it into dictionary with ASCII code as key and no. of appearances as value
self.pPopulation +=x # all counted symbols are added to total source data population
self.pListString = str()
def SortData(self): # creates list by sorting symbols along probability
for key, value in sorted(self.pSymbolsDict.iteritems(), key=lambda (k,v): (v,k),reverse=False):
self.pSymbolsList_sorted.append((key,value))
def Pop2Prob(self): # takes total population into consideration and translates it into probability
for x in range(0,len(self.pSymbolsList_sorted)):
tempItem = list(self.pSymbolsList_sorted.pop(x))
tempItem[1]=float(tempItem[1])/self.pPopulation
print tempItem
self.pSymbolsList_sorted.insert(x,tempItem)
#self.pSymbolsList_sorted[x]=self.pSymbolsList_sorted[x]/self.pPopulation
print self.pSymbolsList_sorted
def setNewSymbolsDict(self, newDict):
self.pSymbolsDict = newDict
def SortedLeafGen(self):
for leaf_no in range(0, len(self.pSymbolsList_sorted)):
self.pBTLeafList.append(TBinTree_Leaf(self.pSymbolsList_sorted[leaf_no][0],self.pSymbolsList_sorted[leaf_no][1]))
self.LeafPopulation = len(self.pBTLeafList)
def DictPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for key in range(self.S_LOW_VALUE, self.S_HIGH_VALUE+1):
if (self.pSymbolsDict.has_key(key)):
print "%.3d='%c' -> %.3f " % (key, key, self.pSymbolsDict[key], \
float(self.pSymbolsDict[key])/self.pPopulation )
def ListPrint(self):
print ("\r\nIn total = %d" % self.pPopulation)
for i in range(0,len(self.pSymbolsList_sorted)):
print("%.3d=%c : %.3f" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
self.pListString = self.pListString + ("%.3d='%c' : %.3f\n" % (self.pSymbolsList_sorted[i][0],self.pSymbolsList_sorted[i][0], \
self.pSymbolsList_sorted[i][1]))
def GetPopulation(self):
return self.pPopulation
def GetSortedList(self):
return self.pSymbolsList_sorted
def GetNodeList(self):
return self.pBTLeafList
def GetSourceEntropy(self):
Entropy = 0
temp = 0
for x in range(0,len(self.pSymbolsList_sorted)):
temp = self.pSymbolsList_sorted[x][1]
Entropy+= temp*np.log2(1.0/temp)
print temp
print "Source entropy:\t%f" % (Entropy)
return Entropy
class TBinTree_Tree:
def __init__(self, Leaves):
self.pPopulation = Leaves.GetPopulation()
self.LeavesList = Leaves.GetNodeList()
self.All_leaves = list()
self.All_edges = list()
self.CodeList = dict()
def __call__(self,option=None): #by default top-down method of defining the binary tree; if a parameter is present - bottom-up
if (option==None):
self.root = fBinaryTreeBuilder(self.LeavesList, 0)
else :
self.root = fBinaryTreeBuilderBU(self.LeavesList, 0)
self.AvgCL = fMECalc(self.root)
def ShowBT(self):
fBinaryTreeNodeFinder(self.root)
def GraphGen(self):
#global graphen
print "Starting Source tree graph generation..."
fBinaryTreeNodeCartograph(self.root)
BTGraph.write_png('SourceTree.png')
print "Graph complete."
def CodingListGenerator(self):
global gCodingDict
print "Generating symbol coding list..."
fBinaryTreeNodeFinder(self.root,Action=1)
# print dictionary with symbol coding
dictKeys = gCodingDict.keys()
for x in range(0,len(gCodingDict)):
print "%02d)\'%c\' -> %s"% (x,dictKeys[x],gCodingDict[dictKeys[x]])
def CodeMessage(self,MessageContent, Action=None):
global gCodingDict
if (Action==None):
codedMsg = ""
print len(MessageContent)
for x in range(0,len(MessageContent)): #TODO try KeyError -> dictionary unspecified key handling - done
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg = codedMsg + gCodingDict[ord(MessageContent[x])] + "|"
return codedMsg
elif (Action==1):
codedMsg = list()
for x in range(0,len(MessageContent)):
if (gCodingDict.has_key(ord(MessageContent[x]))==False):
return None
codedMsg.append(gCodingDict[ord(MessageContent[x])])
return codedMsg
def GetAvgCL(self):
return self.AvgCL
# @brief Decoding function as a method of Binary tree class which cooperates with the independent
# fDecodeBit function which is repeatedly called until the end of the coded message.
# Global variable which temporarily stores the string is used.
# @param Encoded as a string.
def DecodeMessage(self,MessageContent):
print "Commencing message decoding..."
global gTempCodedMsg
gTempCodedMsg = "".join(MessageContent) #copy coded message string into global variable string for further manipulations
gTempCodedMsg = gTempCodedMsg.replace("|","")
#print gTempCodedMsg
tempString = ""
while (len(gTempCodedMsg)): #while there are bits of the coded message available run decoding of consecutive symbols in loop
symbol = fDecodeBit(self.root, gTempCodedMsg)
print symbol
tempString = tempString + chr(symbol) # concatenate character symbol to string
return tempString
print
global gTempCodedMsg
gTempCodedMsg = str()
# @brief Independent function which recursively calls itself to explore the binary tree until a Leaf is reached
# and the proper symbol retrieved
# @param Current - starting point for exploring.
# @param Piece of encoded message - the compass to navigate between branches.
def fDecodeBit(CurrentNode,CodedMsg):
global gTempCodedMsg
gTempCodedMsg = CodedMsg
if (CurrentNode.__class__.__name__ == 'TBinTree_Leaf'):
return CurrentNode.symbol
elif (CurrentNode.__class__.__name__ == 'TBinTree_Node'):
#if (len(CodedMsg)):
#print CodedMsg[0]
if (CodedMsg[0]=='0') :
|
elif (CodedMsg[0]=='1') :
return fDecodeBit(CurrentNode.b_one, CodedMsg[1:])
else :
print "DecodeError 2 Message!"
else:
print "DecodeError 1 Message!"
# @brief Independent function with implementation of suboptimal top-down method of source tree generation
# algorithm
# @param List of BTLeaves representing symbols sorted along probability
# @param Current level - codelength
def fBinaryTreeBuilder(LeavesList,Level) : # top-down method
print "Generating Soure tree implementing top-down algorithm."
leaves_in_list = len(LeavesList)
Population=0
print LeavesList
for i in LeavesList[:] :
Population+=i.prob
print("\tcounted pop=%d,nodes=%d\n" % (Population, leaves_in_list))
if | return fDecodeBit(CurrentNode.b_zero, CodedMsg[1:]) | conditional_block |
rtc_api.rs | const COUNTA_HAPPENED = 0b0100_0000;
const WATCHA_HAPPENED = 0b1000_0000;
}
}
pub const ABRTCMC_CONTROL3: u8 = 0x02;
bitflags! {
pub struct Control3: u8 {
const BATTLOW_INT = 0b0000_0001;
const BATTSWITCH_INT = 0b0000_0010;
const BATTLOW_STAT = 0b0000_0100;
const BATTSW_HAPPENED = 0b0000_1000;
const BATT_STD_BL_EN = 0b0000_0000;
const BATT_DIR_BL_EN = 0b0010_0000;
const BATT_DIS_BL_EN = 0b0100_0000;
const BATT_STD_BL_DIS = 0b1000_0000;
const BATT_DIR_BL_DIS = 0b1010_0000;
const BATT_DI_BL_DIS = 0b1110_0000;
}
}
pub const ABRTCMC_SECONDS: u8 = 0x3;
bitflags! {
pub struct Seconds: u8 {
const SECONDS_BCD = 0b0111_1111;
const CORRUPTED = 0b1000_0000;
}
}
pub const ABRTCMC_MINUTES: u8 = 0x4;
// no bitflags, minutes are BCD whole register
pub const ABRTCMC_HOURS: u8 = 0x5;
bitflags! {
pub struct Hours: u8 {
const HR12_HOURS_BCD = 0b0001_1111;
const HR12_PM_FLAG = 0b0010_0000;
const HR24_HOURS_BCD = 0b0011_1111;
}
}
pub const ABRTCMC_DAYS: u8 = 0x6;
// no bitflags, days are BCD whole register
pub const ABRTCMC_WEEKDAYS: u8 = 0x7;
bitflags! {
pub struct Weekdays: u8 {
const SUNDAY = 0b000;
const MONDAY = 0b001;
const TUESDAY = 0b010;
const WEDNESDAY= 0b011;
const THURSDAY = 0b100;
const FRIDAY = 0b101;
const SATURDAY = 0b110;
}
}
pub const ABRTCMC_MONTHS: u8 = 0x8;
bitflags! {
pub struct Months: u8 { // BCD "months"
const JANUARY = 0b0_0001;
const FEBRUARY = 0b0_0010;
const MARCH = 0b0_0011;
const APRIL = 0b0_0100;
const MAY = 0b0_0101;
const JUNE = 0b0_0110;
const JULY = 0b0_0111;
const AUGUST = 0b0_1000;
const SEPTEMBER = 0b0_1001;
const OCTOBER = 0b1_0000;
const NOVEMBER = 0b1_0001;
const DECEMBER = 0b1_0010;
}
}
pub const ABRTCMC_YEARS: u8 = 0x9;
// no bitflags, years are 00-99 in BCD format
pub const ABRTCMC_MINUTE_ALARM: u8 = 0xA;
pub const ABRTCMC_HOUR_ALARM: u8 = 0xB;
pub const ABRTCMC_DAY_ALARM: u8 = 0xC;
pub const ABRTCMC_WEEKDAY_ALARM: u8 = 0xD;
bitflags! {
pub struct Alarm: u8 {
const ENABLE = 0b1000_0000;
// all others code minute/hour/day/weekday in BCD LSBs
const HR12_PM_FLAG = 0b0010_0000; // only used in hours alarm, 12-hour mode
}
}
pub const ABRTCMC_CONFIG: u8 = 0xF;
bitflags! {
pub struct Config: u8 {
const TIMER_B_ENABLE = 0b0000_0001;
const TIMER_A_WATCHDOG = 0b0000_0100;
const TIMER_A_COUNTDWN = 0b0000_0010;
const TIMER_A_DISABLE = 0b0000_0000;
const TIMER_A_DISABLE2 = 0b0000_0110;
const CLKOUT_32768_HZ = 0b0000_0000;
const CLKOUT_16384_HZ = 0b0000_1000;
const CLKOUT_8192_HZ = 0b0001_0000;
const CLKOUT_4096_HZ = 0b0001_1000;
const CLKOUT_1024_HZ = 0b0010_0000;
const CLKOUT_32_HZ = 0b0010_1000;
const CLKOUT_1_HZ = 0b0011_0000;
const CLKOUT_DISABLE = 0b0011_1000;
const TIMERB_INT_PULSED = 0b0100_0000;
const TIMERA_SECONDS_INT_PULSED = 0b1000_0000;
}
}
pub const ABRTCMC_TIMERA_CLK: u8 = 0x10;
pub const ABRTCMC_TIMERB_CLK: u8 = 0x12;
bitflags! {
pub struct TimerClk: u8 {
const CLK_3600_S = 0b0000_0100;
const CLK_60_S = 0b0000_0011;
const CLK_1_S = 0b0000_0010;
const CLK_64_HZ = 0b0000_0001; // 15.625ms
const CLK_4096_HZ = 0b0000_0000; // 0.2441ms
const PULSE_46_MS = 0b0000_0000;
const PULSE_62_MS = 0b0001_0000;
const PULSE_78_MS = 0b0010_0000;
const PULSE_93_MS = 0b0011_0000;
const PULSE_125_MS = 0b0100_0000;
const PULSE_156_MS = 0b0101_0000;
const PULSE_187_MS = 0b0110_0000;
const PULSE_218_MS = 0b0111_0000;
}
}
pub const ABRTCMC_TIMERA: u8 = 0x11;
// no bitflags, register is timer period in seconds, and the period is N / (source clock frequency)
pub const ABRTCMC_TIMERB: u8 = 0x13;
// no bitflags, register is timer period in seconds, and the period is N / (source clock frequency)
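// Worked example (illustrative figures, not taken from the datasheet text above):
// with TimerClk::CLK_64_HZ as the source (64 Hz) and N = 32 written to the timer
// register, the period is 32 / 64 Hz = 0.5 s; with CLK_1_S (1 Hz) and N = 30 it is 30 s.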
/// This function takes the raw &[u8] as returned by the RTC I2C low level read function
/// and converts it to a number of seconds. All hardware RTC readings are based off of the
/// BCD equivalent of Jan 1 2000, 00:00:00, but keep in mind this is just an internal representation.
/// We turn this into a u64 number of seconds because what we really want out of the hardware RTC
/// is _just_ a count of seconds from some arbitrary but fixed start point, that we anchor through other
/// algorithms to UTC.
pub fn | rtc_to_seconds | identifier_name |
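// Added illustrative sketch (not part of the original crate): the RTC registers read
// by `rtc_to_seconds` are BCD-encoded, and the (not shown) `to_binary` helper it calls
// presumably decodes them roughly like this. The exact body below is an assumption.
fn bcd_to_binary(bcd: u8) -> u8 {
    // upper nibble holds the tens digit, lower nibble holds the ones digit
    (bcd >> 4) * 10 + (bcd & 0x0F)
}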
|
rtc_api.rs | 000_0001;
const TIMER_A_WATCHDOG = 0b0000_0100;
const TIMER_A_COUNTDWN = 0b0000_0010;
const TIMER_A_DISABLE = 0b0000_0000;
const TIMER_A_DISABLE2 = 0b0000_0110;
const CLKOUT_32768_HZ = 0b0000_0000;
const CLKOUT_16384_HZ = 0b0000_1000;
const CLKOUT_8192_HZ = 0b0001_0000;
const CLKOUT_4096_HZ = 0b0001_1000;
const CLKOUT_1024_HZ = 0b0010_0000;
const CLKOUT_32_HZ = 0b0010_1000;
const CLKOUT_1_HZ = 0b0011_0000;
const CLKOUT_DISABLE = 0b0011_1000;
const TIMERB_INT_PULSED = 0b0100_0000;
const TIMERA_SECONDS_INT_PULSED = 0b1000_0000;
}
}
pub const ABRTCMC_TIMERA_CLK: u8 = 0x10;
pub const ABRTCMC_TIMERB_CLK: u8 = 0x12;
bitflags! {
pub struct TimerClk: u8 {
const CLK_3600_S = 0b0000_0100;
const CLK_60_S = 0b0000_0011;
const CLK_1_S = 0b0000_0010;
const CLK_64_HZ = 0b0000_0001; // 15.625ms
const CLK_4096_HZ = 0b0000_0000; // 0.2441ms
const PULSE_46_MS = 0b0000_0000;
const PULSE_62_MS = 0b0001_0000;
const PULSE_78_MS = 0b0010_0000;
const PULSE_93_MS = 0b0011_0000;
const PULSE_125_MS = 0b0100_0000;
const PULSE_156_MS = 0b0101_0000;
const PULSE_187_MS = 0b0110_0000;
const PULSE_218_MS = 0b0111_0000;
}
}
pub const ABRTCMC_TIMERA: u8 = 0x11;
// no bitflags, register is timer period in seconds, and the period is N / (source clock frequency)
pub const ABRTCMC_TIMERB: u8 = 0x13;
// no bitflags, register is timer period in seconds, and the period is N / (source clock frequency)
/// This function takes the raw &[u8] as returned by the RTC I2C low level read function
/// and converts it to a number of seconds. All hardware RTC readings are based off of the
/// BCD equivalent of Jan 1 2000, 00:00:00, but keep in mind this is just an internal representation.
/// We turn this into a u64 number of seconds because what we really want out of the hardware RTC
/// is _just_ a count of seconds from some arbitrary but fixed start point, that we anchor through other
/// algorithms to UTC.
pub fn rtc_to_seconds(settings: &[u8]) -> Option<u64> {
const CTL3: usize = 0;
const SECS: usize = 1;
const MINS: usize = 2;
const HOURS: usize = 3;
const DAYS: usize = 4;
// note 5 is skipped - this is weekdays, and is unused
const MONTHS: usize = 6;
const YEARS: usize = 7;
if ((settings[CTL3] & 0xE0) != crate::RTC_PWR_MODE) // power switchover setting should be initialized
|| (settings[SECS] & 0x80 != 0) { // clock integrity should be guaranteed
log::error!("RTC is in an uninitialized state!, {:x?}", settings);
return None;
}
// this is a secondary check -- I have seen RTC return nonsense time results before
// so this is an extra check above and beyond what's in the datasheet
if (to_binary(settings[SECS]) > 59)
|| (to_binary(settings[MINS]) > 59)
|| (to_binary(settings[HOURS]) > 23) // 24 hour mode is default and assumed
|| (to_binary(settings[DAYS]) > 31) || (to_binary(settings[DAYS]) == 0)
|| (to_binary(settings[MONTHS]) > 12) || (to_binary(settings[MONTHS]) == 0)
|| (to_binary(settings[YEARS]) > 99) {
log::error!("RTC has invalid digits!: {:?}", settings);
return None;
}
let mut total_secs: u64 = 0;
total_secs += to_binary(settings[SECS]) as u64;
total_secs += to_binary(settings[MINS]) as u64 * 60;
total_secs += to_binary(settings[HOURS]) as u64 * 3600;
const SECS_PER_DAY: u64 = 86400;
// DAYS is checked to be 1-31, so, it's safe to subtract 1 here
total_secs += (to_binary(settings[DAYS]) as u64 - 1) * SECS_PER_DAY;
// this will iterate from 0 through 11; december never has an offset added, because its contribution is directly measured in DAYS
for month in 0..to_binary(settings[MONTHS]) {
match month {
0 => total_secs += 0u64,
1 => total_secs += 31u64 * SECS_PER_DAY,
2 => {
// per spec sheet: 1) If the year counter contains a value which is exactly divisible by 4 (including the year 00),
// the AB-RTCMC-32.768kHz-B5ZE-S3 compensates for leap years by adding a 29th day to February.
if (to_binary(settings[YEARS]) % 4) == 0 {
total_secs += 29u64 * SECS_PER_DAY;
} else {
total_secs += 28u64 * SECS_PER_DAY;
};
},
3 => total_secs += 31u64 * SECS_PER_DAY,
4 => total_secs += 30u64 * SECS_PER_DAY,
5 => total_secs += 31u64 * SECS_PER_DAY,
6 => total_secs += 30u64 * SECS_PER_DAY,
7 => total_secs += 31u64 * SECS_PER_DAY,
8 => total_secs += 31u64 * SECS_PER_DAY,
9 => total_secs += 30u64 * SECS_PER_DAY,
10 => total_secs += 31u64 * SECS_PER_DAY,
11 => total_secs += 30u64 * SECS_PER_DAY,
// December should never be encountered in this loop since it's right-exclusive
_ => panic!("RTC code has an internal error, months encountered an 'impossible' value"),
}
}
// figure out what the last round multiple of leap years was before the current time
let last_leap = (to_binary(settings[YEARS]) - to_binary(settings[YEARS]) % 4) as u64;
// now add the contributions of all these prior years
total_secs += (last_leap / 4) * (365 * 3 + 366) * SECS_PER_DAY;
// now add the contributions of any years since the last round multiple of leap years
if to_binary(settings[YEARS]) % 4 != 0 {
// account for the leap year
total_secs += 366 * SECS_PER_DAY;
// now account for successive years
total_secs += 365 * (((to_binary(settings[YEARS]) % 4) - 1) as u64) * SECS_PER_DAY;
}
Some(total_secs) | } | random_line_split |
|
mod.rs | //! methods!
//!
//! Aside from the cognitive complexity of having so many methods on a single
//! trait, this approach had numerous other drawbacks as well:
//!
//! - Implementations that did not implement all available protocol extensions
//! still had to "pay" for the unused packet parsing/handler code, resulting
//! in substantial code bloat, even on `no_std` platforms.
//! - `GdbStub`'s internal implementation needed to include _runtime_ checks to
//! deal with incorrectly implemented `Target`s.
//! - No way to enforce "mutually-dependent" trait methods at compile-time.
//! - e.g: When implementing hardware breakpoint extensions, targets
//! _must_ implement both the `add_breakpoint` and
//! `remove_breakpoints` methods.
//! - No way to enforce "mutually-exclusive" trait methods at compile-time.
//! - e.g: The `resume` method for single-threaded targets has a much
//! simpler API than for multi-threaded targets, but it would be
//! incorrect for a target to implement both.
//!
//! At first blush, it seems the solution to all these issues is obvious:
//! simply tie each protocol extension to a `cargo` feature! And yes, while
//! this would indeed work, there would be several serious ergonomic drawbacks:
//!
//! - There would be _hundreds_ of individual feature flags that would need to
//! be toggled by end users.
//! - It would be functionally impossible to _test_ all permutations of
//! enabled/disabled cargo features.
//! - A single binary would need to rely on some [non-trivial `cargo`-fu](https://github.com/rust-lang/cargo/issues/674)
//! in order to have multiple `Target` implementations in a single binary.
//!
//! After much experimentation and iteration, `gdbstub` ended up taking a
//! radically different approach to implementing and enumerating available
//! features, using a technique called **Inlineable Dyn Extension Traits**.
//!
//! > _Author's note:_ As far as I can tell, this isn't a very well-known trick,
//! or at the very least, I've personally never encountered any library that
//! uses this sort of API. As such, I've decided to be a bit cheeky and give it
//! a name! At some point, I'm hoping to write a standalone blog post which
//! further explores this technique, comparing it to other/existing approaches,
//! and diving into details of how the compiler optimizes this sort of code.
//! In fact, I've already got a [very rough github repo](https://github.com/daniel5151/optional-trait-methods) with some of my
//! findings.
//!
//! So, what are "Inlineable Dyn Extension Traits"? Well, let's break it down:
//!
//! - **Extension Traits** - A common [Rust convention](https://rust-lang.github.io/rfcs/0445-extension-trait-conventions.html#what-is-an-extension-trait)
//! to extend the functionality of a Trait, _without_ modifying the original
//! trait.
//! - **Dyn** - Alludes to the use of Dynamic Dispatch via [Trait Objects](https://doc.rust-lang.org/book/ch17-02-trait-objects.html).
//! - **Inlineable** - Alludes to the fact that this approach can be easily
//! inlined, making it a truly zero-cost abstraction.
//!
//! In a nutshell, Inlineable Dyn Extension Traits (or IDETs) are an abuse of
//! the Rust trait system + modern compiler optimizations to emulate zero-cost,
//! runtime-enumerable optional trait methods!
//!
//! #### Technical overview
//!
//! The basic principles behind Inlineable Dyn Extension Traits are best
//! explained though example:
//!
//! Lets say we want to add an optional protocol extension described by an
//! `ProtocolExt` trait to a base `Protocol` trait. How would we do that using
//! IDETs?
//!
//! - (library) Define a `trait ProtocolExt: Protocol { ... }` which includes
//! all the methods required by the protocol extension:
//! - _Note:_ Making `ProtocolExt` a subtrait of `Protocol` is not strictly
//! required, but it does enable transparently using `Protocol`'s
//! associated types as part of `ProtocolExt`'s method definitions.
//!
//! ```rust,ignore
//! /// `foo` and `bar` are mutually-dependent methods.
//! trait ProtocolExt: Protocol {
//! fn foo(&self);
//! // can use associated types in method signature!
//! fn bar(&mut self) -> Result<(), Self::Error>;
//! }
//! ```
//!
//! - (library) "Associate" the `ProtocolExt` extension trait to the original
//! `Protocol` trait by adding a new `Protocol` method that "downcasts" `self`
//! into a `&mut dyn ProtocolExt`.
//!
//! ```rust,ignore
//! trait Protocol {
//! // ... other methods ...
//!
//! // Optional extension
//! #[inline(always)]
//! fn get_protocol_ext(&mut self) -> Option<ProtocolExtOps<Self>> {
//! // disabled by default
//! None
//! }
//!
//! // Mutually-exclusive extensions
//! fn get_ext_a_or_b(&mut self) -> EitherOrExt<Self::Arch, Self::Error>;
//! }
//!
//! // Using a typedef for readability
//! type ProtocolExtOps<T> =
//! &'a mut dyn ProtocolExt<Arch = <T as Protocol>::Arch, Error = <T as Protocol>::Error>;
//!
//! enum EitherOrExt<A, E> {
//! ProtocolExtA(&'a mut dyn ProtocolExtA<Arch = A, Error = E>),
//! ProtocolExtB(&'a mut dyn ProtocolExtB<Arch = A, Error = E>),
//! }
//! ```
//!
//! - (user) Implements the `ProtocolExt` extension for their target (just like
//! a normal trait).
//!
//! ```rust,ignore
//! impl ProtocolExt for MyTarget {
//! fn foo(&self) { ... }
//! fn bar(&mut self) -> Result<(), Self::Error> { ... }
//! }
//! ```
//!
//! - (user) Implements the base `Protocol` trait, overriding the
//! `get_protocol_ext` method to return `Some(self)`, which will effectively
//! "enable" the extension.
//!
//! ```rust,ignore
//! impl Protocol for MyTarget {
//! // Optional extension
//! #[inline(always)]
//! fn get_protocol_ext(&mut self) -> Option<ProtocolExtOps<Self>> {
//! Some(self) // will not compile unless `MyTarget` also implements `ProtocolExt`
//! }
//!
//! // Mutually-exclusive extensions
//! #[inline(always)]
//! fn get_ext_a_or_b(&mut self) -> EitherOrExt<Self::Arch, Self::Error> {
//! EitherOrExt::ProtocolExtA(self)
//! }
//! }
//! ```
//!
//! > Please note the use of `#[inline(always)]` when enabling IDET methods.
//! While LLVM is usually smart enough to inline single-level IDETs (such as in
//! the example above), nested IDETs will often require a bit of "help" from the
//! `inline` directive to be correctly optimized.
//!
//! Now, here's where IDETs really shine: If the user didn't implement
//! `ProtocolExt`, but _did_ try to enable the feature by overriding
//! `get_protocol_ext` to return `Some(self)`, they'll get a compile-time error
//! that looks something like this:
//!
//! ```text
//! error[E0277]: the trait bound `MyTarget: ProtocolExt` is not satisfied
//! --> path/to/implementation.rs:44:14
//! |
//! 44 | Some(self)
//! | ^^^^ the trait `ProtocolExt` is not implemented for `MyTarget`
//! |
//! = note: required for the cast to the object type `dyn ProtocolExt<Arch = ..., Error = ...>`
//! ```
//!
//! The Rust compiler is preventing you from enabling a feature you haven't
//! implemented _at compile time!_
//!
//! - (library) Is able to _query_ whether or not an extension is available,
//! _without_ having to actually invoke any method on the target!
//!
//! ```rust,ignore
//! fn execute_protocol(mut target: impl Target) {
//! match target.get_protocol_ext() {
//! Some(ops) => ops.foo(),
//! None => { /* fallback when not enabled */ }
//! }
//! }
//! ```
//!
//! This is already pretty cool, but what's _even cooler_ is that if you take a
//! look at the generated assembly of a monomorphized `execute_protocol` method
//! (e.g: using godbolt.org), you'll find that the compiler is able to
//! efficiently inline and devirtualize _all_ the calls to `get_protocol_ext`
//! method, which in-turn allows the dead-code-eliminator to work its magic, and
//! remove the unused branches from the generated code! i.e: If a target
//! implementation didn't implement the `ProtocolExt` extension, then that `match`
//! statement in `execute_protocol` would simply turn into a noop!
//!
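//! For illustration only (this is an assumption about what the optimizer typically
//! produces, not a guarantee), a monomorphized `execute_protocol` for a target whose
//! `get_protocol_ext` returns `None` effectively reduces to:
//!
//! ```rust,ignore
//! fn execute_protocol(mut target: MyTarget) {
//!     // the `match` on `get_protocol_ext()` was inlined and folded away,
//!     // leaving only the fallback branch
//!     /* fallback when not enabled */
//! }
//! ```
//!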
//! If IDETs are something you're interested in, consider checking out
//! [daniel5151/optional-trait-methods](https | //! to the extreme, would have resulted in literally _hundreds_ of associated | random_line_split |
|
main.rs | gl_Position = scale * vec4(0.5 * a_Pos, 0.0, 1.0);
}
";
const FS_SHADER: &'static str = "
#version 150 core
in vec4 v_Color;
out vec4 Target0;
void main() {
Target0 = v_Color;
}
";
const VERTICES: &'static [[f32;2];3] = &[
[-1.0, -0.57],
[ 1.0, -0.57],
[ 0.0, 1.15]
];
const COLORS: &'static [[f32;3];3] = &[
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0]
];
fn add_shader(program: gl::GLuint, src: &str, ty: gl::GLenum) {
let id = unsafe { gl::CreateShader(ty) };
if id == (0 as gl::GLuint) {
panic!("Failed to create shader type: {:?}", ty);
}
let mut source = Vec::new();
source.extend_from_slice(src.as_bytes());
gl::shader_source(id, &[&source[..]]);
gl::compile_shader(id);
let log = gl::get_shader_info_log(id);
if gl::get_shader_iv(id, gl::COMPILE_STATUS) == (0 as gl::GLint) {
panic!("Failed to compile shader:\n{}", log);
} else {
if !log.is_empty() {
println!("Warnings detected on shader:\n{}", log);
}
gl::attach_shader(program, id);
}
}
fn compile_shaders(handles: &GlHandles) {
handles.program.set(gl::create_program());
if handles.program.get() == (0 as gl::GLuint) {
panic!("Failed to create shader program");
}
add_shader(handles.program.get(), VS_SHADER, gl::VERTEX_SHADER);
add_shader(handles.program.get(), FS_SHADER, gl::FRAGMENT_SHADER);
gl::link_program(handles.program.get());
if gl::get_program_iv(handles.program.get(), gl::LINK_STATUS) == (0 as gl::GLint) {
let error_log = gl::get_program_info_log(handles.program.get());
panic!("Failed to link shader program: \n{}", error_log);
}
gl::validate_program(handles.program.get());
if gl::get_program_iv(handles.program.get(), gl::VALIDATE_STATUS) == (0 as gl::GLint) {
let error_log = gl::get_program_info_log(handles.program.get());
panic!("Failed to validate shader program: \n{}", error_log);
}
gl::use_program(handles.program.get());
}
fn gl_draw(handles: &GlHandles) {
gl::clear_color(CLEAR_COLOR.0, CLEAR_COLOR.1, CLEAR_COLOR.2, CLEAR_COLOR.3);
gl::clear(gl::COLOR_BUFFER_BIT);
gl::bind_buffer(gl::ARRAY_BUFFER, handles.vbos.get()[0]);
gl::buffer_data(gl::ARRAY_BUFFER, VERTICES, gl::STATIC_DRAW);
gl::vertex_attrib_pointer(0 as gl::GLuint,
2,
gl::FLOAT,
false,
0 as gl::GLint,
0);
gl::enable_vertex_attrib_array(0 as gl::GLuint);
gl::bind_buffer(gl::ARRAY_BUFFER, handles.vbos.get()[1]);
gl::buffer_data(gl::ARRAY_BUFFER, COLORS, gl::STATIC_DRAW);
gl::vertex_attrib_pointer(1 as gl::GLuint,
3,
gl::FLOAT,
false,
0 as gl::GLint,
0);
gl::enable_vertex_attrib_array(1 as gl::GLuint);
handles.scale.set(handles.scale.get() + 0.01);
let scale = handles.scale.get();
let rot_matrix = [
scale.cos(), -1.0 * scale.sin(), 0.0, 0.0,
scale.sin(), scale.cos(), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
];
let scale_pos = gl::get_uniform_location(handles.program.get(), "scale");
gl::uniform_matrix_4fv(scale_pos, false, &rot_matrix);
gl::draw_arrays(gl::TRIANGLES, 0, 3);
}
pub fn main() {
gl::load_with(|s| GLContext::<NativeGLContext>::get_proc_address(s) as *const _);
let offscreen_ctx = GLContext::<NativeGLContext>::new(Size2D::new(256, 256),
GLContextAttributes::default(),
ColorAttachmentType::Renderbuffer,
None).unwrap();
offscreen_ctx.make_current().unwrap();
let handles: GlHandles = GlHandles::new();
// Create VAO and VBOs
handles.vao.set(gl::gen_vertex_arrays(1)[0]);
gl::bind_vertex_array(handles.vao.get());
let buffer_ids = gl::gen_buffers(2);
handles.vbos.set([buffer_ids[0], buffer_ids[1]]);
compile_shaders(&handles);
// Create FBO and bind it
let fbo = gl::gen_framebuffers(1)[0];
gl::bind_framebuffer(gl::FRAMEBUFFER, fbo);
let extensions = vulkano_win::required_extensions();
let instance = vulkano::instance::Instance::new(None, &extensions, &[]).expect("failed to create instance");
let physical = vulkano::instance::PhysicalDevice::enumerate(&instance)
.next().expect("no device available");
println!("Using device: {} (type: {:?})", physical.name(), physical.ty());
let window = winit::WindowBuilder::new().build_vk_surface(&instance).unwrap();
let queue = physical.queue_families().find(|q| q.supports_graphics() &&
window.surface().is_supported(q).unwrap_or(false))
.expect("couldn't find a graphical queue family");
let device_ext = vulkano::device::DeviceExtensions {
khr_swapchain: true,
.. vulkano::device::DeviceExtensions::none()
};
let (device, mut queues) = vulkano::device::Device::new(&physical, physical.supported_features(),
&device_ext, [(queue, 0.5)].iter().cloned())
.expect("failed to create device");
let queue = queues.next().unwrap();
let (swapchain, images) = {
let caps = window.surface().get_capabilities(&physical).expect("failed to get surface capabilities");
let dimensions = caps.current_extent.unwrap_or([WIN_WIDTH as u32, WIN_HEIGTH as u32]);
let present = caps.present_modes.iter().next().unwrap();
let usage = caps.supported_usage_flags;
vulkano::swapchain::Swapchain::new(&device, &window.surface(), caps.min_image_count,
vulkano::format::B8G8R8A8Srgb, dimensions, 1,
&usage, &queue, vulkano::swapchain::SurfaceTransform::Identity,
vulkano::swapchain::CompositeAlpha::Opaque,
present, true, None).expect("failed to create swapchain")
};
#[derive(Debug, Clone)]
struct Vertex { position: [f32; 2] }
impl_vertex!(Vertex, position);
let vertex_buffer = vulkano::buffer::cpu_access::CpuAccessibleBuffer::<[Vertex]>
::from_iter(&device, &vulkano::buffer::BufferUsage::all(),
Some(queue.family()), [
Vertex { position: [-0.5, -0.5 ] },
Vertex { position: [-0.5, 0.5 ] },
Vertex { position: [ 0.5, -0.5 ] },
Vertex { position: [ 0.5, 0.5 ] },
].iter().cloned()).expect("failed to create buffer");
mod vs { include!{concat!(env!("OUT_DIR"), "/shaders/src/shader/image_vs.glsl")} }
let vs = vs::Shader::load(&device).expect("failed to create shader module");
mod fs { include!{concat!(env!("OUT_DIR"), "/shaders/src/shader/image_fs.glsl")} }
let fs = fs::Shader::load(&device).expect("failed to create shader module");
mod renderpass {
single_pass_renderpass!{
attachments: {
color: {
load: Clear,
store: Store,
format: ::vulkano::format::B8G8R8A8Srgb,
}
},
pass: {
color: [color],
depth_stencil: {}
}
}
}
let renderpass = renderpass::CustomRenderPass::new(&device, &renderpass::Formats {
color: (vulkano::format::B8G8R8A8Srgb, 1)
}).unwrap();
let texture = vulkano::image::immutable::ImmutableImage::new(&device, vulkano::image:: | v_Color = vec4(a_Color, 1.0); | random_line_split |
|
main.rs | () -> GlHandles {
GlHandles {
vao: Cell::new(0 as gl::GLuint),
vbos: Cell::new([0,0]),
program: Cell::new(0 as gl::GLuint),
scale: Cell::new(0.0),
}
}
}
const CLEAR_COLOR: (f32, f32, f32, f32) = (0.0, 0.2, 0.3, 1.0);
const WIN_WIDTH: i32 = 256;
const WIN_HEIGTH: i32 = 256;
const VS_SHADER: &'static str = "
#version 150 core
in vec2 a_Pos;
in vec3 a_Color;
uniform mat4 scale;
out vec4 v_Color;
void main() {
v_Color = vec4(a_Color, 1.0);
gl_Position = scale * vec4(0.5 * a_Pos, 0.0, 1.0);
}
";
const FS_SHADER: &'static str = "
#version 150 core
in vec4 v_Color;
out vec4 Target0;
void main() {
Target0 = v_Color;
}
";
const VERTICES: &'static [[f32;2];3] = &[
[-1.0, -0.57],
[ 1.0, -0.57],
[ 0.0, 1.15]
];
const COLORS: &'static [[f32;3];3] = &[
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0]
];
fn add_shader(program: gl::GLuint, src: &str, ty: gl::GLenum) {
let id = unsafe { gl::CreateShader(ty) };
if id == (0 as gl::GLuint) {
panic!("Failed to create shader type: {:?}", ty);
}
let mut source = Vec::new();
source.extend_from_slice(src.as_bytes());
gl::shader_source(id, &[&source[..]]);
gl::compile_shader(id);
let log = gl::get_shader_info_log(id);
if gl::get_shader_iv(id, gl::COMPILE_STATUS) == (0 as gl::GLint) {
panic!("Failed to compile shader:\n{}", log);
} else {
if !log.is_empty() {
println!("Warnings detected on shader:\n{}", log);
}
gl::attach_shader(program, id);
}
}
fn compile_shaders(handles: &GlHandles) {
handles.program.set(gl::create_program());
if handles.program.get() == (0 as gl::GLuint) {
panic!("Failed to create shader program");
}
add_shader(handles.program.get(), VS_SHADER, gl::VERTEX_SHADER);
add_shader(handles.program.get(), FS_SHADER, gl::FRAGMENT_SHADER);
gl::link_program(handles.program.get());
if gl::get_program_iv(handles.program.get(), gl::LINK_STATUS) == (0 as gl::GLint) {
let error_log = gl::get_program_info_log(handles.program.get());
panic!("Failed to link shader program: \n{}", error_log);
}
gl::validate_program(handles.program.get());
if gl::get_program_iv(handles.program.get(), gl::VALIDATE_STATUS) == (0 as gl::GLint) {
let error_log = gl::get_program_info_log(handles.program.get());
panic!("Failed to validate shader program: \n{}", error_log);
}
gl::use_program(handles.program.get());
}
fn gl_draw(handles: &GlHandles) {
gl::clear_color(CLEAR_COLOR.0, CLEAR_COLOR.1, CLEAR_COLOR.2, CLEAR_COLOR.3);
gl::clear(gl::COLOR_BUFFER_BIT);
gl::bind_buffer(gl::ARRAY_BUFFER, handles.vbos.get()[0]);
gl::buffer_data(gl::ARRAY_BUFFER, VERTICES, gl::STATIC_DRAW);
gl::vertex_attrib_pointer(0 as gl::GLuint,
2,
gl::FLOAT,
false,
0 as gl::GLint,
0);
gl::enable_vertex_attrib_array(0 as gl::GLuint);
gl::bind_buffer(gl::ARRAY_BUFFER, handles.vbos.get()[1]);
gl::buffer_data(gl::ARRAY_BUFFER, COLORS, gl::STATIC_DRAW);
gl::vertex_attrib_pointer(1 as gl::GLuint,
3,
gl::FLOAT,
false,
0 as gl::GLint,
0);
gl::enable_vertex_attrib_array(1 as gl::GLuint);
handles.scale.set(handles.scale.get() + 0.01);
let scale = handles.scale.get();
let rot_matrix = [
scale.cos(), -1.0 * scale.sin(), 0.0, 0.0,
scale.sin(), scale.cos(), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
];
let scale_pos = gl::get_uniform_location(handles.program.get(), "scale");
gl::uniform_matrix_4fv(scale_pos, false, &rot_matrix);
gl::draw_arrays(gl::TRIANGLES, 0, 3);
}
pub fn main() {
gl::load_with(|s| GLContext::<NativeGLContext>::get_proc_address(s) as *const _);
let offscreen_ctx = GLContext::<NativeGLContext>::new(Size2D::new(256, 256),
GLContextAttributes::default(),
ColorAttachmentType::Renderbuffer,
None).unwrap();
offscreen_ctx.make_current().unwrap();
let handles: GlHandles = GlHandles::new();
// Create VAO and VBOs
handles.vao.set(gl::gen_vertex_arrays(1)[0]);
gl::bind_vertex_array(handles.vao.get());
let buffer_ids = gl::gen_buffers(2);
handles.vbos.set([buffer_ids[0], buffer_ids[1]]);
compile_shaders(&handles);
// Create FBO and bind it
let fbo = gl::gen_framebuffers(1)[0];
gl::bind_framebuffer(gl::FRAMEBUFFER, fbo);
let extensions = vulkano_win::required_extensions();
let instance = vulkano::instance::Instance::new(None, &extensions, &[]).expect("failed to create instance");
let physical = vulkano::instance::PhysicalDevice::enumerate(&instance)
.next().expect("no device available");
println!("Using device: {} (type: {:?})", physical.name(), physical.ty());
let window = winit::WindowBuilder::new().build_vk_surface(&instance).unwrap();
let queue = physical.queue_families().find(|q| q.supports_graphics() &&
window.surface().is_supported(q).unwrap_or(false))
.expect("couldn't find a graphical queue family");
let device_ext = vulkano::device::DeviceExtensions {
khr_swapchain: true,
.. vulkano::device::DeviceExtensions::none()
};
let (device, mut queues) = vulkano::device::Device::new(&physical, physical.supported_features(),
&device_ext, [(queue, 0.5)].iter().cloned())
.expect("failed to create device");
let queue = queues.next().unwrap();
let (swapchain, images) = {
let caps = window.surface().get_capabilities(&physical).expect("failed to get surface capabilities");
let dimensions = caps.current_extent.unwrap_or([WIN_WIDTH as u32, WIN_HEIGTH as u32]);
let present = caps.present_modes.iter().next().unwrap();
let usage = caps.supported_usage_flags;
vulkano::swapchain::Swapchain::new(&device, &window.surface(), caps.min_image_count,
vulkano::format::B8G8R8A8Srgb, dimensions, 1,
&usage, &queue, vulkano::swapchain::SurfaceTransform::Identity,
vulkano::swapchain::CompositeAlpha::Opaque,
present, true, None).expect("failed to create swapchain")
};
#[derive(Debug, Clone)]
struct Vertex { position: [f32; 2] }
impl_vertex!(Vertex, position);
let vertex_buffer = vulkano::buffer::cpu_access::CpuAccessibleBuffer::<[Vertex]>
::from_iter(&device, &vulkano::buffer::BufferUsage::all(),
Some(queue.family()), [
Vertex { position: [-0.5, -0.5 ] },
Vertex { position: [-0.5, 0.5 ] },
Vertex { position: [ 0.5, -0.5 ] },
Vertex { position: [ 0.5, 0.5 ] },
].iter().cloned()).expect("failed to create buffer");
mod vs { include!{concat!(env!("OUT_DIR"), "/shaders/src/shader/image_vs.glsl")} }
let vs = vs::Shader::load(&device).expect("failed to create shader module");
mod fs { include!{concat!(env!("OUT_DIR | new | identifier_name |
|
main.rs | = &[
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0]
];
fn add_shader(program: gl::GLuint, src: &str, ty: gl::GLenum) {
let id = unsafe { gl::CreateShader(ty) };
if id == (0 as gl::GLuint) {
panic!("Failed to create shader type: {:?}", ty);
}
let mut source = Vec::new();
source.extend_from_slice(src.as_bytes());
gl::shader_source(id, &[&source[..]]);
gl::compile_shader(id);
let log = gl::get_shader_info_log(id);
if gl::get_shader_iv(id, gl::COMPILE_STATUS) == (0 as gl::GLint) {
panic!("Failed to compile shader:\n{}", log);
} else |
}
fn compile_shaders(handles: &GlHandles) {
handles.program.set(gl::create_program());
if handles.program.get() == (0 as gl::GLuint) {
panic!("Failed to create shader program");
}
add_shader(handles.program.get(), VS_SHADER, gl::VERTEX_SHADER);
add_shader(handles.program.get(), FS_SHADER, gl::FRAGMENT_SHADER);
gl::link_program(handles.program.get());
if gl::get_program_iv(handles.program.get(), gl::LINK_STATUS) == (0 as gl::GLint) {
let error_log = gl::get_program_info_log(handles.program.get());
panic!("Failed to link shader program: \n{}", error_log);
}
gl::validate_program(handles.program.get());
if gl::get_program_iv(handles.program.get(), gl::VALIDATE_STATUS) == (0 as gl::GLint) {
let error_log = gl::get_program_info_log(handles.program.get());
panic!("Failed to validate shader program: \n{}", error_log);
}
gl::use_program(handles.program.get());
}
fn gl_draw(handles: &GlHandles) {
gl::clear_color(CLEAR_COLOR.0, CLEAR_COLOR.1, CLEAR_COLOR.2, CLEAR_COLOR.3);
gl::clear(gl::COLOR_BUFFER_BIT);
gl::bind_buffer(gl::ARRAY_BUFFER, handles.vbos.get()[0]);
gl::buffer_data(gl::ARRAY_BUFFER, VERTICES, gl::STATIC_DRAW);
gl::vertex_attrib_pointer(0 as gl::GLuint,
2,
gl::FLOAT,
false,
0 as gl::GLint,
0);
gl::enable_vertex_attrib_array(0 as gl::GLuint);
gl::bind_buffer(gl::ARRAY_BUFFER, handles.vbos.get()[1]);
gl::buffer_data(gl::ARRAY_BUFFER, COLORS, gl::STATIC_DRAW);
gl::vertex_attrib_pointer(1 as gl::GLuint,
3,
gl::FLOAT,
false,
0 as gl::GLint,
0);
gl::enable_vertex_attrib_array(1 as gl::GLuint);
handles.scale.set(handles.scale.get() + 0.01);
let scale = handles.scale.get();
let rot_matrix = [
scale.cos(), -1.0 * scale.sin(), 0.0, 0.0,
scale.sin(), scale.cos(), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
];
let scale_pos = gl::get_uniform_location(handles.program.get(), "scale");
gl::uniform_matrix_4fv(scale_pos, false, &rot_matrix);
gl::draw_arrays(gl::TRIANGLES, 0, 3);
}
pub fn main() {
gl::load_with(|s| GLContext::<NativeGLContext>::get_proc_address(s) as *const _);
let offscreen_ctx = GLContext::<NativeGLContext>::new(Size2D::new(256, 256),
GLContextAttributes::default(),
ColorAttachmentType::Renderbuffer,
None).unwrap();
offscreen_ctx.make_current().unwrap();
let handles: GlHandles = GlHandles::new();
// Create VAO and VBOs
handles.vao.set(gl::gen_vertex_arrays(1)[0]);
gl::bind_vertex_array(handles.vao.get());
let buffer_ids = gl::gen_buffers(2);
handles.vbos.set([buffer_ids[0], buffer_ids[1]]);
compile_shaders(&handles);
// Create FBO and bind it
let fbo = gl::gen_framebuffers(1)[0];
gl::bind_framebuffer(gl::FRAMEBUFFER, fbo);
let extensions = vulkano_win::required_extensions();
let instance = vulkano::instance::Instance::new(None, &extensions, &[]).expect("failed to create instance");
let physical = vulkano::instance::PhysicalDevice::enumerate(&instance)
.next().expect("no device available");
println!("Using device: {} (type: {:?})", physical.name(), physical.ty());
let window = winit::WindowBuilder::new().build_vk_surface(&instance).unwrap();
let queue = physical.queue_families().find(|q| q.supports_graphics() &&
window.surface().is_supported(q).unwrap_or(false))
.expect("couldn't find a graphical queue family");
let device_ext = vulkano::device::DeviceExtensions {
khr_swapchain: true,
.. vulkano::device::DeviceExtensions::none()
};
let (device, mut queues) = vulkano::device::Device::new(&physical, physical.supported_features(),
&device_ext, [(queue, 0.5)].iter().cloned())
.expect("failed to create device");
let queue = queues.next().unwrap();
let (swapchain, images) = {
let caps = window.surface().get_capabilities(&physical).expect("failed to get surface capabilities");
let dimensions = caps.current_extent.unwrap_or([WIN_WIDTH as u32, WIN_HEIGTH as u32]);
let present = caps.present_modes.iter().next().unwrap();
let usage = caps.supported_usage_flags;
vulkano::swapchain::Swapchain::new(&device, &window.surface(), caps.min_image_count,
vulkano::format::B8G8R8A8Srgb, dimensions, 1,
&usage, &queue, vulkano::swapchain::SurfaceTransform::Identity,
vulkano::swapchain::CompositeAlpha::Opaque,
present, true, None).expect("failed to create swapchain")
};
#[derive(Debug, Clone)]
struct Vertex { position: [f32; 2] }
impl_vertex!(Vertex, position);
let vertex_buffer = vulkano::buffer::cpu_access::CpuAccessibleBuffer::<[Vertex]>
::from_iter(&device, &vulkano::buffer::BufferUsage::all(),
Some(queue.family()), [
Vertex { position: [-0.5, -0.5 ] },
Vertex { position: [-0.5, 0.5 ] },
Vertex { position: [ 0.5, -0.5 ] },
Vertex { position: [ 0.5, 0.5 ] },
].iter().cloned()).expect("failed to create buffer");
mod vs { include!{concat!(env!("OUT_DIR"), "/shaders/src/shader/image_vs.glsl")} }
let vs = vs::Shader::load(&device).expect("failed to create shader module");
mod fs { include!{concat!(env!("OUT_DIR"), "/shaders/src/shader/image_fs.glsl")} }
let fs = fs::Shader::load(&device).expect("failed to create shader module");
mod renderpass {
single_pass_renderpass!{
attachments: {
color: {
load: Clear,
store: Store,
format: ::vulkano::format::B8G8R8A8Srgb,
}
},
pass: {
color: [color],
depth_stencil: {}
}
}
}
let renderpass = renderpass::CustomRenderPass::new(&device, &renderpass::Formats {
color: (vulkano::format::B8G8R8A8Srgb, 1)
}).unwrap();
let texture = vulkano::image::immutable::ImmutableImage::new(&device, vulkano::image::Dimensions::Dim2d { width: WIN_WIDTH as u32, height: WIN_HEIGTH as u32 },
vulkano::format::R8G8B8A8Unorm, Some(queue.family())).unwrap();
let sampler = vulkano::sampler::Sampler::new(&device, vulkano::sampler::Filter::Linear,
vulkano::sampler::Filter::Linear, vulkano::sampler::MipmapMode::Nearest,
vulkano::sampler::SamplerAddressMode::Repeat,
vulkano::sampler::SamplerAddressMode::Repeat,
vulkano::sampler::SamplerAddressMode::Repeat,
0.0, 1.0, 0.0, | {
if !log.is_empty() {
println!("Warnings detected on shader:\n{}", log);
}
gl::attach_shader(program, id);
} | conditional_block |
main.go | : in6dest,
In6Plen: uint(64), Rt6Dest: rt6dest, Rt6Plen: uint(64)}
event(logdebug, li, "Session [%v] deactivation parameters: [Interface "+
"name: %v, Tunnel source: %v, Tunnel destination: %v, Server "+
"inet6 address: %v/64, Client inet6 address: %v/64, Routed "+
"prefix: %v/64]", e.Id, ifname, app.SvInfo.TunSrc, e.Opt,
in6addr, in6dest, rt6dest)
var si []Id
if err = deactivateSession(s); err != nil {
si = []Id{Id{ErrNo: EINVAL}}
event(logwarn, li, err.Error())
} else {
si = []Id{Id{}}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func check(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqCheck++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrCheck++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrCheck++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrCheck++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
//var ip6s = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var ip6c = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var si = make([]Id, 2)
for i := range si {
var dst = e.Opt
var udp = "udp4"
if i == 1 {
udp = "udp6"
dst = ip6c
}
if rtt, tgt, err := pingSession(dst, udp); err != nil {
si[i] = Id{ErrNo: EINVAL, Opt: tgt}
event(logwarn, li, err.Error())
} else {
si[i] = Id{Id: int64(rtt), Opt: tgt}
}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func status(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqStatus++
var buf, _ = json.Marshal(&stat)
sendResponse(w, &Msg{Data: string(buf)})
return
}
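// serverInfo fetches this tunnel server's configuration (tunnel source address,
// point-to-point and routed /48 prefixes, active sessions) from the rebana
// controller, verifies the response signature, and caches the result in app.SvInfo.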
func serverInfo() (err error) {
var data = &RebanaRequestMsg{UserId: 102, Command: "server-info",
Data: app.HostName}
var buf, _ = json.Marshal(data)
var url = app.RebanaUrl + "/v/info"
var rd = bytes.NewReader(buf)
var req *http.Request
if req, err = http.NewRequest("POST", url, rd); err != nil {
return
}
var loc = &time.Location{}
if loc, err = time.LoadLocation("Etc/GMT"); err != nil {
return
}
req.Header.Add("Date", time.Now().In(loc).Format(time.RFC1123))
req.Header.Add("Accept", "application/json")
req.Header.Add("Content-Type", "application/json")
req.Header.Add("X-N3-Service-Name", "rebana")
req.Header.Add("X-N3-Tunnel-Server", app.HostName)
req.Header.Add("X-N3-Signature", signRequest(buf, 0))
var c = &http.Client{}
c.Transport = &http.Transport{TLSClientConfig: tlsc}
var res *http.Response
if res, err = c.Do(req); err != nil {
return
}
defer res.Body.Close()
var t time.Time
if t, err = checkResponseHeader(res); err != nil {
return
}
var msg = &RebanaMsg{}
if err = json.NewDecoder(res.Body).Decode(msg); err != nil {
return
}
var sig = res.Header.Get("X-N3-Signature")
buf, _ = json.Marshal(msg)
if err = checkSignature(sig, buf); err != nil {
stat.ReqErrSignature++
return
}
if msg.ErrNo != EOK {
return errors.New(msg.Data)
}
var sv = &ServerInfo{}
if err = json.Unmarshal([]byte(msg.Data), sv); err != nil {
return
}
var ipf int
if ipf, err = checkIPFamily(sv.TunSrc); ipf != 4 {
return
}
if !strings.Contains(sv.PpPrefix, "::/48") ||
!strings.Contains(sv.RtPrefix, "::/48") {
return
}
app.SvInfo = &ServerInfo{}
app.SvInfo.Id = sv.Id
app.SvInfo.TunSrc = sv.TunSrc
app.SvInfo.PpPrefix = sv.PpPrefix
app.SvInfo.RtPrefix = sv.RtPrefix
app.SvInfo.Session = sv.Session
var sl = "Active tunnel session(s): "
for i := range sv.Session {
var e = sv.Session[i]
var sa = fmt.Sprintf("%v[%v]", e.Id, e.Dst)
sl += sa
if i < (len(sv.Session) - 1) {
sl += ", "
}
}
var ts = t.In(time.Local).Format(time.RFC1123)
event(logdebug, li, "Server information retrieved: "+
"[Timestamp: %v, Server name: %v[%v], "+
"Tunnel source address: %v, "+
"Tunnel point-to-point prefix: %v, "+
"Tunnel routed prefix: %v, Active tunnel sessions: %v]",
ts, app.HostName, sv.Id, sv.TunSrc, sv.PpPrefix, sv.RtPrefix,
len(sv.Session))
return
}
func defaultHandler(w http.ResponseWriter, r *http.Request) {
stat.ReqAll++
var err error
var str = "Invalid request"
if err = checkUrl(r); err != nil {
stat.ReqErrHeader++
sendError(w, EINVAL, str, err)
return
}
if err = checkHeader(r); err != nil {
stat.ReqErrHeader++
sendError(w, EINVAL, str, err)
return
}
var d *RequestMsg
if d, err = checkData(r); err != nil {
stat.ReqErrPayload++
sendError(w, EINVAL, str, err)
return
}
event(logdebug, li, "Processing request [%v:%v]", d.Command, d.MsgId)
switch d.Command {
case "activate":
err = activate(w, d)
case "deactivate":
err = deactivate(w, d)
case "check":
err = check(w, d)
case "status":
err = status(w, d)
}
if err != nil {
str += ": " + d.Command
sendError(w, EINVAL, str, err)
return
}
event(logdebug, li, "Request [%v:%v] completed", d.Command, d.MsgId)
}
func main() {
var help, debug bool
var conf string
flag.BoolVar(&debug, "d", false, "Debug mode")
flag.BoolVar(&help, "h", false, "Display usage")
flag.StringVar(&conf, "c", CONFFILE, "Configuration file")
flag.Parse()
if help {
usage()
}
app = &AppConfig{ProgName: APPNAME, Version: APPVER, Pid: os.Getpid()}
stat = &AppStat{HostName: app.HostName}
var err error
if err = parseConfig(conf); err != nil {
fatal(err.Error())
}
if err = setupLog(debug); err != nil {
fatal(err.Error())
}
go sigHandler()
event(loginfo, li, "%v-%v server started: %v", app.ProgName, app.Version,
app.HostName)
if err = setupServer(); err != nil {
fatal(err.Error())
}
if err = serverInfo(); err != nil {
fatal(err.Error())
}
var pid = fmt.Sprintf("%v", app.Pid)
if err = ioutil.WriteFile(PIDFILE, []byte(pid), 0644); err != nil {
fatal(err.Error())
}
select {}
}
func sigHandler() {
var quit bool
var c = make(chan os.Signal, 1)
signal.Notify(c, syscall.SIGINT, syscall.SIGTERM)
var signal = <-c
event(lognotice, li, "Signal received: "+signal.String())
switch signal {
case syscall.SIGINT, syscall.SIGTERM:
quit = true
}
if quit {
event(lognotice, li, "Terminating..")
os.Remove(PIDFILE)
os.Exit(0)
}
}
func usage() | {
var str = fmt.Sprintf("%v-%v\nusage: %v [-d] [-h] [-c config file]\n",
APPNAME, APPVER, APPNAME)
fmt.Fprintf(os.Stderr, str)
os.Exit(1)
} | identifier_body |
|
main.go | 64
ReqError int64
ReqErrUrl int64
ReqErrHeader int64
ReqErrPayload int64
ReqErrSignature int64
ReqErrServerId int64
ReqErrUserId int64
ReqErrMsgId int64
ReqErrCommand int64
ReqErrData int64
ReqErrActivate int64
ReqErrDeactivate int64
ReqErrCheck int64
ReqErrStatus int64
}
const (
APPNAME = "rebanats"
APPVER = "1.0.0"
PIDFILE = "/var/run/rebanats.pid"
CONFFILE = "/usr/local/etc/rebanats.json"
// error codes
EOK = 0
EINVAL = 1
EAGAIN = 2
ENOENT = 3
)
var (
app *AppConfig
stat *AppStat
tlsc *tls.Config
)
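// activate provisions a tunnel session: it derives the gifN interface name and the
// point-to-point/routed IPv6 addresses from the session id, then calls activateSession.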
func activate(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqActivate++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrActivate++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrActivate++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrActivate++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
var rt = strings.Split(app.SvInfo.RtPrefix, "::/")
var ifname = fmt.Sprintf("gif%v", e.Id)
var in6addr = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var in6dest = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var rt6dest = fmt.Sprintf("%v:%v::", rt[0], e.Id)
var s = &CSession{Ifname: ifname, TunAddr: app.SvInfo.TunSrc,
TunDest: e.Opt, In6Addr: in6addr, In6Dest: in6dest,
In6Plen: uint(64), Rt6Dest: rt6dest, Rt6Plen: uint(64)}
event(logdebug, li, "Session [%v] activation parameters: [Interface "+
"name: %v, Tunnel source: %v, Tunnel destination: %v, Server "+
"inet6 address: %v/64, Client inet6 address: %v/64, Routed "+
"prefix: %v/64]", e.Id, ifname, app.SvInfo.TunSrc, e.Opt,
in6addr, in6dest, rt6dest)
var si []Id
if err = activateSession(s); err != nil {
si = []Id{Id{ErrNo: EINVAL}}
event(logwarn, li, err.Error())
} else {
si = []Id{Id{}}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func deactivate(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqDeactivate++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrDeactivate++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrDeactivate++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrDeactivate++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
var rt = strings.Split(app.SvInfo.RtPrefix, "::/")
var ifname = fmt.Sprintf("gif%v", e.Id)
var in6addr = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var in6dest = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var rt6dest = fmt.Sprintf("%v:%v::", rt[0], e.Id)
var s = &CSession{Ifname: ifname, TunAddr: app.SvInfo.TunSrc,
TunDest: e.Opt, In6Addr: in6addr, In6Dest: in6dest,
In6Plen: uint(64), Rt6Dest: rt6dest, Rt6Plen: uint(64)}
event(logdebug, li, "Session [%v] deactivation parameters: [Interface "+
"name: %v, Tunnel source: %v, Tunnel destination: %v, Server "+
"inet6 address: %v/64, Client inet6 address: %v/64, Routed "+
"prefix: %v/64]", e.Id, ifname, app.SvInfo.TunSrc, e.Opt,
in6addr, in6dest, rt6dest)
var si []Id
if err = deactivateSession(s); err != nil {
si = []Id{Id{ErrNo: EINVAL}}
event(logwarn, li, err.Error())
} else {
si = []Id{Id{}}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func check(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqCheck++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrCheck++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrCheck++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrCheck++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
//var ip6s = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var ip6c = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var si = make([]Id, 2)
for i := range si {
var dst = e.Opt
var udp = "udp4"
if i == 1 {
udp = "udp6"
dst = ip6c
}
if rtt, tgt, err := pingSession(dst, udp); err != nil {
si[i] = Id{ErrNo: EINVAL, Opt: tgt}
event(logwarn, li, err.Error())
} else {
si[i] = Id{Id: int64(rtt), Opt: tgt}
}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func status(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqStatus++
var buf, _ = json.Marshal(&stat)
sendResponse(w, &Msg{Data: string(buf)})
return
}
func | () (err error) {
var data = &RebanaRequestMsg{UserId: 102, Command: "server-info",
Data: app.HostName}
var buf, _ = json.Marshal(data)
var url = app.RebanaUrl + "/v/info"
var rd = bytes.NewReader(buf)
var req *http.Request
if req, err = http.NewRequest("POST", url, rd); err != nil {
return
}
var loc = &time.Location{}
if loc, err = time.LoadLocation("Etc/GMT"); err != nil {
return
}
req.Header.Add("Date", time.Now().In(loc).Format(time.RFC1123))
req.Header.Add("Accept", "application/json")
req.Header.Add("Content-Type", "application/json")
req.Header.Add("X-N3-Service-Name", "rebana")
req.Header.Add("X-N3-Tunnel-Server", app.HostName)
req.Header.Add("X-N3-Signature", signRequest(buf, 0))
var c = &http.Client{}
c.Transport = &http.Transport{TLSClientConfig: tlsc}
var res *http.Response
if res, err = c.Do(req); err != nil {
return
}
defer res.Body.Close()
var t time.Time
if t, err = checkResponseHeader(res); err != nil {
return
}
var msg = &RebanaMsg{}
if err = json.NewDecoder(res.Body).Decode(msg); err != nil {
return
}
var sig = res.Header.Get("X-N3-Signature")
buf, _ = json.Marshal(msg)
if err = checkSignature(sig, buf); err != nil {
stat.ReqErrSignature++
return
}
if msg.ErrNo != EOK {
return errors.New(msg.Data)
}
var sv = &ServerInfo{}
if err = json.Unmarshal([]byte(msg.Data), sv); err != nil {
return
}
var ipf int
if ipf, | serverInfo | identifier_name |
main.go | Opt string
}
type IdList struct {
Id int64
Entry []Id
}
type ServerInfo struct {
Id int64
TunSrc string
PpPrefix string
RtPrefix string
Session []Session
}
type Session struct {
Id int64
Dst string
Idx int64
}
type BindInfo struct {
Host string
Port string
}
type AppConfig struct {
HostName string `json:"ServerName"`
ProgName string
Version string
Pid int
Bind []BindInfo
RebanaUrl string
LogUrl string
Secret string
TLSCACert []string `json:"TLSCACert"`
SvInfo *ServerInfo
}
type AppStat struct {
HostName string
ReqAll int64
ReqActivate int64
ReqDeactivate int64
ReqCheck int64
ReqStatus int64
ReqError int64
ReqErrUrl int64
ReqErrHeader int64
ReqErrPayload int64
ReqErrSignature int64
ReqErrServerId int64
ReqErrUserId int64
ReqErrMsgId int64
ReqErrCommand int64
ReqErrData int64
ReqErrActivate int64
ReqErrDeactivate int64
ReqErrCheck int64
ReqErrStatus int64
}
const (
APPNAME = "rebanats"
APPVER = "1.0.0"
PIDFILE = "/var/run/rebanats.pid"
CONFFILE = "/usr/local/etc/rebanats.json"
// error codes
EOK = 0
EINVAL = 1
EAGAIN = 2
ENOENT = 3
)
var (
app *AppConfig
stat *AppStat
tlsc *tls.Config
)
func activate(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqActivate++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrActivate++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrActivate++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrActivate++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
var rt = strings.Split(app.SvInfo.RtPrefix, "::/")
var ifname = fmt.Sprintf("gif%v", e.Id)
var in6addr = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var in6dest = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var rt6dest = fmt.Sprintf("%v:%v::", rt[0], e.Id)
var s = &CSession{Ifname: ifname, TunAddr: app.SvInfo.TunSrc,
TunDest: e.Opt, In6Addr: in6addr, In6Dest: in6dest,
In6Plen: uint(64), Rt6Dest: rt6dest, Rt6Plen: uint(64)}
event(logdebug, li, "Session [%v] activation parameters: [Interface "+
"name: %v, Tunnel source: %v, Tunnel destination: %v, Server "+
"inet6 address: %v/64, Client inet6 address: %v/64, Routed "+
"prefix: %v/64]", e.Id, ifname, app.SvInfo.TunSrc, e.Opt,
in6addr, in6dest, rt6dest)
var si []Id
if err = activateSession(s); err != nil {
si = []Id{Id{ErrNo: EINVAL}}
event(logwarn, li, err.Error())
} else {
si = []Id{Id{}}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func deactivate(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqDeactivate++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrDeactivate++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrDeactivate++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrDeactivate++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
var rt = strings.Split(app.SvInfo.RtPrefix, "::/")
var ifname = fmt.Sprintf("gif%v", e.Id)
var in6addr = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var in6dest = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var rt6dest = fmt.Sprintf("%v:%v::", rt[0], e.Id)
var s = &CSession{Ifname: ifname, TunAddr: app.SvInfo.TunSrc,
TunDest: e.Opt, In6Addr: in6addr, In6Dest: in6dest,
In6Plen: uint(64), Rt6Dest: rt6dest, Rt6Plen: uint(64)}
event(logdebug, li, "Session [%v] deactivation parameters: [Interface "+
"name: %v, Tunnel source: %v, Tunnel destination: %v, Server "+
"inet6 address: %v/64, Client inet6 address: %v/64, Routed "+
"prefix: %v/64]", e.Id, ifname, app.SvInfo.TunSrc, e.Opt,
in6addr, in6dest, rt6dest)
var si []Id
if err = deactivateSession(s); err != nil {
si = []Id{Id{ErrNo: EINVAL}}
event(logwarn, li, err.Error())
} else {
si = []Id{Id{}}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func check(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqCheck++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrCheck++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrCheck++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrCheck++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
//var ip6s = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var ip6c = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var si = make([]Id, 2)
for i := range si {
var dst = e.Opt
var udp = "udp4"
if i == 1 {
udp = "udp6"
dst = ip6c
}
if rtt, tgt, err := pingSession(dst, udp); err != nil {
si[i] = Id{ErrNo: EINVAL, Opt: tgt}
event(logwarn, li, err.Error())
} else {
si[i] = Id{Id: int64(rtt), Opt: tgt}
}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func status(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqStatus++
var buf, _ = json.Marshal(&stat)
sendResponse(w, &Msg{Data: string(buf)})
return
}
func serverInfo() (err error) {
var data = &RebanaRequestMsg{UserId: 102, Command: "server-info",
Data: app.HostName}
var buf, _ = json.Marshal(data)
var url = app.RebanaUrl + "/v/info"
var rd = bytes.NewReader(buf)
var req *http.Request
if req, err = http.NewRequest("POST", url, rd); err != nil {
return
}
var loc = &time.Location{}
if loc, err = time.LoadLocation("Etc/GMT"); err != nil {
return
}
req.Header.Add("Date", time.Now().In(loc).Format(time.RFC1123))
req.Header.Add("Accept", "application/json")
req.Header.Add("Content-Type", "application/json")
req.Header.Add("X-N3-Service-Name", "rebana")
req.Header.Add("X-N3-Tunnel-Server", app.HostName)
req.Header | Id int64
ErrNo int | random_line_split |
|
main.go | v:%v::", rt[0], e.Id)
var s = &CSession{Ifname: ifname, TunAddr: app.SvInfo.TunSrc,
TunDest: e.Opt, In6Addr: in6addr, In6Dest: in6dest,
In6Plen: uint(64), Rt6Dest: rt6dest, Rt6Plen: uint(64)}
event(logdebug, li, "Session [%v] deactivation parameters: [Interface "+
"name: %v, Tunnel source: %v, Tunnel destination: %v, Server "+
"inet6 address: %v/64, Client inet6 address: %v/64, Routed "+
"prefix: %v/64]", e.Id, ifname, app.SvInfo.TunSrc, e.Opt,
in6addr, in6dest, rt6dest)
var si []Id
if err = deactivateSession(s); err != nil {
si = []Id{Id{ErrNo: EINVAL}}
event(logwarn, li, err.Error())
} else {
si = []Id{Id{}}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func check(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqCheck++
var m *IdList
if m, err = getIdList(d.Data, d.Command); err != nil {
stat.ReqErrCheck++
return
}
var e = m.Entry[0]
if e.Id == 0 {
stat.ReqErrCheck++
return
}
var ipf int
if ipf, err = checkIPFamily(e.Opt); ipf != 4 {
stat.ReqErrCheck++
return
}
var pp = strings.Split(app.SvInfo.PpPrefix, "::/")
//var ip6s = fmt.Sprintf("%v:%v::1", pp[0], e.Id)
var ip6c = fmt.Sprintf("%v:%v::2", pp[0], e.Id)
var si = make([]Id, 2)
for i := range si {
var dst = e.Opt
var udp = "udp4"
if i == 1 {
udp = "udp6"
dst = ip6c
}
if rtt, tgt, err := pingSession(dst, udp); err != nil {
si[i] = Id{ErrNo: EINVAL, Opt: tgt}
event(logwarn, li, err.Error())
} else {
si[i] = Id{Id: int64(rtt), Opt: tgt}
}
}
var buf, _ = json.Marshal(&IdList{Entry: si})
sendResponse(w, &Msg{Data: string(buf)})
return nil
}
func status(w http.ResponseWriter, d *RequestMsg) (err error) {
stat.ReqStatus++
var buf, _ = json.Marshal(&stat)
sendResponse(w, &Msg{Data: string(buf)})
return
}
func serverInfo() (err error) {
var data = &RebanaRequestMsg{UserId: 102, Command: "server-info",
Data: app.HostName}
var buf, _ = json.Marshal(data)
var url = app.RebanaUrl + "/v/info"
var rd = bytes.NewReader(buf)
var req *http.Request
if req, err = http.NewRequest("POST", url, rd); err != nil {
return
}
var loc = &time.Location{}
if loc, err = time.LoadLocation("Etc/GMT"); err != nil {
return
}
req.Header.Add("Date", time.Now().In(loc).Format(time.RFC1123))
req.Header.Add("Accept", "application/json")
req.Header.Add("Content-Type", "application/json")
req.Header.Add("X-N3-Service-Name", "rebana")
req.Header.Add("X-N3-Tunnel-Server", app.HostName)
req.Header.Add("X-N3-Signature", signRequest(buf, 0))
var c = &http.Client{}
c.Transport = &http.Transport{TLSClientConfig: tlsc}
var res *http.Response
if res, err = c.Do(req); err != nil {
return
}
defer res.Body.Close()
var t time.Time
if t, err = checkResponseHeader(res); err != nil {
return
}
var msg = &RebanaMsg{}
if err = json.NewDecoder(res.Body).Decode(msg); err != nil {
return
}
var sig = res.Header.Get("X-N3-Signature")
buf, _ = json.Marshal(msg)
if err = checkSignature(sig, buf); err != nil {
stat.ReqErrSignature++
return
}
if msg.ErrNo != EOK {
return errors.New(msg.Data)
}
var sv = &ServerInfo{}
if err = json.Unmarshal([]byte(msg.Data), sv); err != nil {
return
}
var ipf int
if ipf, err = checkIPFamily(sv.TunSrc); ipf != 4 {
return
}
if !strings.Contains(sv.PpPrefix, "::/48") ||
!strings.Contains(sv.RtPrefix, "::/48") {
return
}
app.SvInfo = &ServerInfo{}
app.SvInfo.Id = sv.Id
app.SvInfo.TunSrc = sv.TunSrc
app.SvInfo.PpPrefix = sv.PpPrefix
app.SvInfo.RtPrefix = sv.RtPrefix
app.SvInfo.Session = sv.Session
var sl = "Active tunnel session(s): "
for i := range sv.Session {
var e = sv.Session[i]
var sa = fmt.Sprintf("%v[%v]", e.Id, e.Dst)
sl += sa
if i < (len(sv.Session) - 1) {
sl += ", "
}
}
var ts = t.In(time.Local).Format(time.RFC1123)
event(logdebug, li, "Server information retrieved: "+
"[Timestamp: %v, Server name: %v[%v], "+
"Tunnel source address: %v, "+
"Tunnel point-to-point prefix: %v, "+
"Tunnel routed prefix: %v, Active tunnel sessions: %v]",
ts, app.HostName, sv.Id, sv.TunSrc, sv.PpPrefix, sv.RtPrefix,
len(sv.Session))
return
}
func defaultHandler(w http.ResponseWriter, r *http.Request) {
stat.ReqAll++
var err error
var str = "Invalid request"
if err = checkUrl(r); err != nil {
stat.ReqErrHeader++
sendError(w, EINVAL, str, err)
return
}
if err = checkHeader(r); err != nil {
stat.ReqErrHeader++
sendError(w, EINVAL, str, err)
return
}
var d *RequestMsg
if d, err = checkData(r); err != nil {
stat.ReqErrPayload++
sendError(w, EINVAL, str, err)
return
}
event(logdebug, li, "Processing request [%v:%v]", d.Command, d.MsgId)
switch d.Command {
case "activate":
err = activate(w, d)
case "deactivate":
err = deactivate(w, d)
case "check":
err = check(w, d)
case "status":
err = status(w, d)
}
if err != nil {
str += ": " + d.Command
sendError(w, EINVAL, str, err)
return
}
event(logdebug, li, "Request [%v:%v] completed", d.Command, d.MsgId)
}
func main() {
var help, debug bool
var conf string
flag.BoolVar(&debug, "d", false, "Debug mode")
flag.BoolVar(&help, "h", false, "Display usage")
flag.StringVar(&conf, "c", CONFFILE, "Configuration file")
flag.Parse()
if help {
usage()
}
app = &AppConfig{ProgName: APPNAME, Version: APPVER, Pid: os.Getpid()}
stat = &AppStat{HostName: app.HostName}
var err error
if err = parseConfig(conf); err != nil {
fatal(err.Error())
}
if err = setupLog(debug); err != nil {
fatal(err.Error())
}
go sigHandler()
event(loginfo, li, "%v-%v server started: %v", app.ProgName, app.Version,
app.HostName)
if err = setupServer(); err != nil {
fatal(err.Error())
}
if err = serverInfo(); err != nil {
fatal(err.Error())
}
var pid = fmt.Sprintf("%v", app.Pid)
if err = ioutil.WriteFile(PIDFILE, []byte(pid), 0644); err != nil {
fatal(err.Error())
}
select {}
}
func sigHandler() {
var quit bool
var c = make(chan os.Signal, 1)
signal.Notify(c, syscall.SIGINT, syscall.SIGTERM)
var signal = <-c
event(lognotice, li, "Signal received: "+signal.String())
switch signal {
case syscall.SIGINT, syscall.SIGTERM:
quit = true
}
if quit | {
event(lognotice, li, "Terminating..")
os.Remove(PIDFILE)
os.Exit(0)
} | conditional_block |
|
digits_DG_Gphi_projection.py | d(c_in, c_out, 3, stride=1, padding=1)
self.relu = nn.ReLU(True)
def forward(self, x):
return self.relu(self.conv(x))
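# ConvNet: 4-block CNN backbone for 32x32 digit images; after four 2x2 max-pools the
# flattened feature vector has 64 * 2 * 2 = 256 dimensions (for c_hidden=64).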
class ConvNet(Backbone):
def __init__(self, c_hidden=64):
super().__init__()
self.conv1 = Convolution(3, c_hidden)
self.conv2 = Convolution(c_hidden, c_hidden)
self.conv3 = Convolution(c_hidden, c_hidden)
self.conv4 = Convolution(c_hidden, c_hidden)
self._out_features = 2**2 * c_hidden
def _check_input(self, x):
H, W = x.shape[2:]
assert H == 32 and W == 32, 'Input to network must be 32x32, ' 'but got {}x{}'.format(H, W)
def forward(self, x):
self._check_input(x)
x = self.conv1(x)
x = F.max_pool2d(x, 2)
x = self.conv2(x)
x = F.max_pool2d(x, 2)
x = self.conv3(x)
x = F.max_pool2d(x, 2)
x = self.conv4(x)
x = F.max_pool2d(x, 2)
return x.view(x.size(0), -1)
class DGdata(Dataset):
def __init__(self, root_dir, image_size, domains=None, transform = None):
self.root_dir = root_dir
if root_dir[-1] != "/":
self.root_dir = self.root_dir + "/"
self.categories = ['0', '1', '2', '3', '4', '5', '6','7', '8', '9']
if domains is None:
self.domains = ["mnist", "mnist_m", "svhn", "syn"]
else:
self.domains = domains
if transform is None:
self.transform = transforms.ToTensor()
else:
self.transform = transform
# make a list of all the files in the root_dir
# and read the labels
self.img_files = []
self.labels = []
self.domain_labels = []
for domain in self.domains:
for category in self.categories:
for image in os.listdir(self.root_dir+domain+'/'+category):
self.img_files.append(image)
self.labels.append(self.categories.index(category))
self.domain_labels.append(self.domains.index(domain))
def __len__(self):
return len(self.img_files)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
img_path = self.root_dir + self.domains[self.domain_labels[idx]] + "/" + self.categories[self.labels[idx]] + "/" + self.img_files[idx]
image = PIL.Image.open(img_path)
label = self.labels[idx]
return self.transform(image), label
class Flatten(nn.Module):
def forward(self, input):
return input.view(input.size(0), -1)
class UnFlatten(nn.Module):
def forward(self, input, size=64):
return input.view(input.size(0), size, 1, 1)
class VAE_Digits(nn.Module):
def __init__(self, image_channels=1, h_dim=64, z_dim=32):
super(VAE_Digits, self).__init__()
self.encoder = nn.Sequential(
nn.Conv2d(image_channels, 4, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(4, 8, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
Flatten()
)
self.fc1 = nn.Linear(h_dim, z_dim)
self.fc2 = nn.Linear(h_dim, z_dim)
self.fc3 = nn.Linear(z_dim, h_dim)
self.decoder = nn.Sequential(
UnFlatten(),
nn.ConvTranspose2d(h_dim, 16, kernel_size=2, stride=1),
nn.ReLU(),
nn.ConvTranspose2d(16, 8, kernel_size=2, stride=2),
nn.ReLU(),
nn.ConvTranspose2d(8, 4, kernel_size=2, stride=2),
nn.Sigmoid(),
)
def reparameterize(self, mu, logvar):
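# Reparameterization trick: sample eps ~ N(0, I) and return mu + eps * sigma so the
# draw stays differentiable w.r.t. mu and logvar.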
std = torch.exp(0.5*logvar)
eps = torch.randn_like(std)
z = mu + eps*std
return z
def bottleneck(self, h):
mu, logvar = self.fc1(h), self.fc2(h)
z = self.reparameterize(mu, logvar)
return z, mu, logvar
def encode(self, x):
x = x.view(-1, 1, 16,16)
h = self.encoder(x)
z, mu, logvar = self.bottleneck(h)
return z, mu, logvar
def decode(self, z):
z = self.fc3(z)
z = self.decoder(z)
return z.view(-1, 1, 16,16)
def forward(self, x):
z, mu, logvar = self.encode(x)
z = self.decode(z)
return z, mu, logvar
digits_fnet = ConvNet(c_hidden=64)
checkpoint = torch.load('../Models/digits_fnet.pt')
digits_fnet.load_state_dict(checkpoint['model_state_dict'])
digits_fnet = digits_fnet.to(dev)
layers = []
layers.append(nn.Linear(FEATURE_DIM, CLASSES))
classifier = torch.nn.Sequential(*layers).to(dev)
CELoss = nn.CrossEntropyLoss()
classifier = classifier.to(dev)
data_transforms = transforms.Compose([transforms.RandomResizedCrop(size=IMAGE_SIZE), transforms.ToTensor()] )
ds = DGdata(".", IMAGE_SIZE, [src_path], transform=data_transforms)
dataloader = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
digits_fnet.eval()
opt = torch.optim.Adam(classifier.parameters(), lr=0.003)
for epoch in range(15):
step_wise_loss = []
step_wise_accuracy = []
for image_batch, labels in (dataloader):
image_batch = image_batch.float()
if dev is not None:
image_batch, labels = image_batch.to(dev), labels.to(dev)
# zero the parameter gradients
opt.zero_grad()
z = digits_fnet(image_batch).to(dev)
pred = classifier(z)
loss = CELoss(pred, labels)
accuracy = (pred.argmax(dim=1) == labels).float().sum()/pred.shape[0]
loss.backward()
opt.step()
step_wise_loss.append(loss.detach().cpu().numpy())
step_wise_accuracy.append(accuracy.detach().cpu().numpy())
print("Epoch " + str(epoch) + " Loss " + str(np.mean(step_wise_loss)) + " Accuracy " + str(np.mean(step_wise_accuracy)))
vae = VAE_Digits().to(dev)
VAEoptim = LARS(torch.optim.SGD(vae.parameters(), lr=0.005))
dataloader_vae = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
# Modified VAE loss: L1 + MSE reconstruction terms plus the KL divergence between q(z|x) and N(0, I)
def loss_function(recon_x, x, mu, logvar):
l2 = F.mse_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
l1 = F.l1_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
return l1 + l2 + KLD
def trainVAE(epoch):
vae.train()
train_loss = 0
print(epoch)
for batch_idx, (image_batch, _) in enumerate(dataloader_vae):
image_batch = image_batch.float()
image_batch = image_batch.to(dev) | #print(h.shape)
h = h.view(-1, 1, 16,16)
#print(h.shape)
h=h.detach()
recon_batch, mu, logvar = vae(h)
loss = loss_function(recon_batch, h, mu, logvar)
loss.backward()
train_loss += loss.item()
VAEoptim.step()
print('====> Epoch: {} Average loss: {:.4f}'.format(
epoch, train_loss / len(dataloader_vae.dataset)))
for epoch in range(1, 150):
trainVAE(epoch)
if (epoch)%10 == 0:
torch.save({'epoch' : epoch,
'model_state_dict': vae.state_dict(),
'optimizer_state_dict': VAEoptim.state_dict()
}, CHECKPOINT_DIR+"VAEepoch_digits_"+str(epoch)+".pt")
############################################ inference - target projection ##############################################################
vae.eval()
data_transforms = transforms.Compose([transforms.RandomResizedCrop(size=IMAGE_SIZE), transforms.ToTensor()] )
test_data = DGdata(".", IMAGE_SIZE, [target_path | VAEoptim.zero_grad()
h = digits_fnet(image_batch).to(dev) | random_line_split |
digits_DG_Gphi_projection.py | d(c_in, c_out, 3, stride=1, padding=1)
self.relu = nn.ReLU(True)
def forward(self, x):
return self.relu(self.conv(x))
class ConvNet(Backbone):
def __init__(self, c_hidden=64):
super().__init__()
self.conv1 = Convolution(3, c_hidden)
self.conv2 = Convolution(c_hidden, c_hidden)
self.conv3 = Convolution(c_hidden, c_hidden)
self.conv4 = Convolution(c_hidden, c_hidden)
self._out_features = 2**2 * c_hidden
def _check_input(self, x):
H, W = x.shape[2:]
assert H == 32 and W == 32, 'Input to network must be 32x32, ' 'but got {}x{}'.format(H, W)
def forward(self, x):
self._check_input(x)
x = self.conv1(x)
x = F.max_pool2d(x, 2)
x = self.conv2(x)
x = F.max_pool2d(x, 2)
x = self.conv3(x)
x = F.max_pool2d(x, 2)
x = self.conv4(x)
x = F.max_pool2d(x, 2)
return x.view(x.size(0), -1)
class DGdata(Dataset):
def __init__(self, root_dir, image_size, domains=None, transform = None):
self.root_dir = root_dir
if root_dir[-1] != "/":
self.root_dir = self.root_dir + "/"
self.categories = ['0', '1', '2', '3', '4', '5', '6','7', '8', '9']
if domains is None:
self.domains = ["mnist", "mnist_m", "svhn", "syn"]
else:
self.domains = domains
if transform is None:
self.transform = transforms.ToTensor()
else:
self.transform = transform
# make a list of all the files in the root_dir
# and read the labels
self.img_files = []
self.labels = []
self.domain_labels = []
for domain in self.domains:
for category in self.categories:
for image in os.listdir(self.root_dir+domain+'/'+category):
self.img_files.append(image)
self.labels.append(self.categories.index(category))
self.domain_labels.append(self.domains.index(domain))
def __len__(self):
return len(self.img_files)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
img_path = self.root_dir + self.domains[self.domain_labels[idx]] + "/" + self.categories[self.labels[idx]] + "/" + self.img_files[idx]
image = PIL.Image.open(img_path)
label = self.labels[idx]
return self.transform(image), label
class | (nn.Module):
def forward(self, input):
return input.view(input.size(0), -1)
class UnFlatten(nn.Module):
def forward(self, input, size=64):
return input.view(input.size(0), size, 1, 1)
class VAE_Digits(nn.Module):
def __init__(self, image_channels=1, h_dim=64, z_dim=32):
super(VAE_Digits, self).__init__()
self.encoder = nn.Sequential(
nn.Conv2d(image_channels, 4, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(4, 8, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
Flatten()
)
self.fc1 = nn.Linear(h_dim, z_dim)
self.fc2 = nn.Linear(h_dim, z_dim)
self.fc3 = nn.Linear(z_dim, h_dim)
self.decoder = nn.Sequential(
UnFlatten(),
nn.ConvTranspose2d(h_dim, 16, kernel_size=2, stride=1),
nn.ReLU(),
nn.ConvTranspose2d(16, 8, kernel_size=2, stride=2),
nn.ReLU(),
nn.ConvTranspose2d(8, 4, kernel_size=2, stride=2),
nn.Sigmoid(),
)
def reparameterize(self, mu, logvar):
std = torch.exp(0.5*logvar)
eps = torch.randn_like(std)
z = mu + eps*std
return z
def bottleneck(self, h):
mu, logvar = self.fc1(h), self.fc2(h)
z = self.reparameterize(mu, logvar)
return z, mu, logvar
def encode(self, x):
x = x.view(-1, 1, 16,16)
h = self.encoder(x)
z, mu, logvar = self.bottleneck(h)
return z, mu, logvar
def decode(self, z):
z = self.fc3(z)
z = self.decoder(z)
return z.view(-1, 1, 16,16)
def forward(self, x):
z, mu, logvar = self.encode(x)
z = self.decode(z)
return z, mu, logvar
digits_fnet = ConvNet(c_hidden=64)
checkpoint = torch.load('../Models/digits_fnet.pt')
digits_fnet.load_state_dict(checkpoint['model_state_dict'])
digits_fnet = digits_fnet.to(dev)
layers = []
layers.append(nn.Linear(FEATURE_DIM, CLASSES))
classifier = torch.nn.Sequential(*layers).to(dev)
CELoss = nn.CrossEntropyLoss()
classifier = classifier.to(dev)
data_transforms = transforms.Compose([transforms.RandomResizedCrop(size=IMAGE_SIZE), transforms.ToTensor()] )
ds = DGdata(".", IMAGE_SIZE, [src_path], transform=data_transforms)
dataloader = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
digits_fnet.eval()
opt = torch.optim.Adam(classifier.parameters(), lr=0.003)
for epoch in range(15):
step_wise_loss = []
step_wise_accuracy = []
for image_batch, labels in (dataloader):
image_batch = image_batch.float()
if dev is not None:
image_batch, labels = image_batch.to(dev), labels.to(dev)
# zero the parameter gradients
opt.zero_grad()
z = digits_fnet(image_batch).to(dev)
pred = classifier(z)
loss = CELoss(pred, labels)
accuracy = (pred.argmax(dim=1) == labels).float().sum()/pred.shape[0]
loss.backward()
opt.step()
step_wise_loss.append(loss.detach().cpu().numpy())
step_wise_accuracy.append(accuracy.detach().cpu().numpy())
print("Epoch " + str(epoch) + " Loss " + str(np.mean(step_wise_loss)) + " Accuracy " + str(np.mean(step_wise_accuracy)))
vae = VAE_Digits().to(dev)
VAEoptim = LARS(torch.optim.SGD(vae.parameters(), lr=0.005))
dataloader_vae = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
#modified loss
def loss_function(recon_x, x, mu, logvar):
l2 = F.mse_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
l1 = F.l1_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
return l1 + l2 + KLD
def trainVAE(epoch):
vae.train()
train_loss = 0
print(epoch)
for batch_idx, (image_batch, _) in enumerate(dataloader_vae):
image_batch = image_batch.float()
image_batch = image_batch.to(dev)
VAEoptim.zero_grad()
h = digits_fnet(image_batch).to(dev)
#print(h.shape)
h = h.view(-1, 1, 16,16)
#print(h.shape)
h=h.detach()
recon_batch, mu, logvar = vae(h)
loss = loss_function(recon_batch, h, mu, logvar)
loss.backward()
train_loss += loss.item()
VAEoptim.step()
print('====> Epoch: {} Average loss: {:.4f}'.format(
epoch, train_loss / len(dataloader_vae.dataset)))
for epoch in range(1, 150):
trainVAE(epoch)
if (epoch)%10 == 0:
torch.save({'epoch' : epoch,
'model_state_dict': vae.state_dict(),
'optimizer_state_dict': VAEoptim.state_dict()
}, CHECKPOINT_DIR+"VAEepoch_digits_"+str(epoch)+".pt")
############################################ inference - target projection ##############################################################
vae.eval()
data_transforms = transforms.Compose([transforms.RandomResizedCrop(size=IMAGE_SIZE), transforms.ToTensor()] )
test_data = DGdata(".", IMAGE_SIZE, [ | Flatten | identifier_name |
digits_DG_Gphi_projection.py | d(c_in, c_out, 3, stride=1, padding=1)
self.relu = nn.ReLU(True)
def forward(self, x):
return self.relu(self.conv(x))
class ConvNet(Backbone):
def __init__(self, c_hidden=64):
super().__init__()
self.conv1 = Convolution(3, c_hidden)
self.conv2 = Convolution(c_hidden, c_hidden)
self.conv3 = Convolution(c_hidden, c_hidden)
self.conv4 = Convolution(c_hidden, c_hidden)
self._out_features = 2**2 * c_hidden
def _check_input(self, x):
H, W = x.shape[2:]
assert H == 32 and W == 32, 'Input to network must be 32x32, ' 'but got {}x{}'.format(H, W)
def forward(self, x):
self._check_input(x)
x = self.conv1(x)
x = F.max_pool2d(x, 2)
x = self.conv2(x)
x = F.max_pool2d(x, 2)
x = self.conv3(x)
x = F.max_pool2d(x, 2)
x = self.conv4(x)
x = F.max_pool2d(x, 2)
return x.view(x.size(0), -1)
class DGdata(Dataset):
def __init__(self, root_dir, image_size, domains=None, transform = None):
self.root_dir = root_dir
if root_dir[-1] != "/":
self.root_dir = self.root_dir + "/"
self.categories = ['0', '1', '2', '3', '4', '5', '6','7', '8', '9']
if domains is None:
self.domains = ["mnist", "mnist_m", "svhn", "syn"]
else:
self.domains = domains
if transform is None:
|
else:
self.transform = transform
# make a list of all the files in the root_dir
# and read the labels
self.img_files = []
self.labels = []
self.domain_labels = []
for domain in self.domains:
for category in self.categories:
for image in os.listdir(self.root_dir+domain+'/'+category):
self.img_files.append(image)
self.labels.append(self.categories.index(category))
self.domain_labels.append(self.domains.index(domain))
def __len__(self):
return len(self.img_files)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
img_path = self.root_dir + self.domains[self.domain_labels[idx]] + "/" + self.categories[self.labels[idx]] + "/" + self.img_files[idx]
image = PIL.Image.open(img_path)
label = self.labels[idx]
return self.transform(image), label
class Flatten(nn.Module):
def forward(self, input):
return input.view(input.size(0), -1)
class UnFlatten(nn.Module):
def forward(self, input, size=64):
return input.view(input.size(0), size, 1, 1)
class VAE_Digits(nn.Module):
def __init__(self, image_channels=1, h_dim=64, z_dim=32):
super(VAE_Digits, self).__init__()
self.encoder = nn.Sequential(
nn.Conv2d(image_channels, 4, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(4, 8, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
Flatten()
)
self.fc1 = nn.Linear(h_dim, z_dim)
self.fc2 = nn.Linear(h_dim, z_dim)
self.fc3 = nn.Linear(z_dim, h_dim)
self.decoder = nn.Sequential(
UnFlatten(),
nn.ConvTranspose2d(h_dim, 16, kernel_size=2, stride=1),
nn.ReLU(),
nn.ConvTranspose2d(16, 8, kernel_size=2, stride=2),
nn.ReLU(),
nn.ConvTranspose2d(8, 4, kernel_size=2, stride=2),
nn.Sigmoid(),
)
def reparameterize(self, mu, logvar):
std = torch.exp(0.5*logvar)
eps = torch.randn_like(std)
z = mu + eps*std
return z
def bottleneck(self, h):
mu, logvar = self.fc1(h), self.fc2(h)
z = self.reparameterize(mu, logvar)
return z, mu, logvar
def encode(self, x):
x = x.view(-1, 1, 16,16)
h = self.encoder(x)
z, mu, logvar = self.bottleneck(h)
return z, mu, logvar
def decode(self, z):
z = self.fc3(z)
z = self.decoder(z)
return z.view(-1, 1, 16,16)
def forward(self, x):
z, mu, logvar = self.encode(x)
z = self.decode(z)
return z, mu, logvar
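# VAE_Digits treats the 256-d backbone features as 1x16x16 single-channel maps: encode() compresses them to a z_dim-dimensional latent and decode() reconstructs the 1x16x16 map.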
digits_fnet = ConvNet(c_hidden=64)
checkpoint = torch.load('../Models/digits_fnet.pt')
digits_fnet.load_state_dict(checkpoint['model_state_dict'])
digits_fnet = digits_fnet.to(dev)
layers = []
layers.append(nn.Linear(FEATURE_DIM, CLASSES))
classifier = torch.nn.Sequential(*layers).to(dev)
CELoss = nn.CrossEntropyLoss()
classifier = classifier.to(dev)
data_transforms = transforms.Compose([transforms.RandomResizedCrop(size=IMAGE_SIZE), transforms.ToTensor()] )
ds = DGdata(".", IMAGE_SIZE, [src_path], transform=data_transforms)
dataloader = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
digits_fnet.eval()
opt = torch.optim.Adam(classifier.parameters(), lr=0.003)
for epoch in range(15):
step_wise_loss = []
step_wise_accuracy = []
for image_batch, labels in (dataloader):
image_batch = image_batch.float()
if dev is not None:
image_batch, labels = image_batch.to(dev), labels.to(dev)
# zero the parameter gradients
opt.zero_grad()
z = digits_fnet(image_batch).to(dev)
pred = classifier(z)
loss = CELoss(pred, labels)
accuracy = (pred.argmax(dim=1) == labels).float().sum()/pred.shape[0]
loss.backward()
opt.step()
step_wise_loss.append(loss.detach().cpu().numpy())
step_wise_accuracy.append(accuracy.detach().cpu().numpy())
print("Epoch " + str(epoch) + " Loss " + str(np.mean(step_wise_loss)) + " Accuracy " + str(np.mean(step_wise_accuracy)))
vae = VAE_Digits().to(dev)
VAEoptim = LARS(torch.optim.SGD(vae.parameters(), lr=0.005))
dataloader_vae = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
#modified loss
def loss_function(recon_x, x, mu, logvar):
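# Reconstruction is penalized with both MSE and L1 terms on the 1x16x16 feature map, plus the analytic KL divergence between N(mu, sigma^2) and the standard normal prior.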
l2 = F.mse_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
l1 = F.l1_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
return l1 + l2 + KLD
def trainVAE(epoch):
vae.train()
train_loss = 0
print(epoch)
for batch_idx, (image_batch, _) in enumerate(dataloader_vae):
image_batch = image_batch.float()
image_batch = image_batch.to(dev)
VAEoptim.zero_grad()
h = digits_fnet(image_batch).to(dev)
#print(h.shape)
h = h.view(-1, 1, 16,16)
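# The flattened fnet feature (256-d) is reshaped into a 1x16x16 map and detached below, so the VAE is trained on fixed backbone features.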
#print(h.shape)
h=h.detach()
recon_batch, mu, logvar = vae(h)
loss = loss_function(recon_batch, h, mu, logvar)
loss.backward()
train_loss += loss.item()
VAEoptim.step()
print('====> Epoch: {} Average loss: {:.4f}'.format(
epoch, train_loss / len(dataloader_vae.dataset)))
for epoch in range(1, 150):
trainVAE(epoch)
if (epoch)%10 == 0:
torch.save({'epoch' : epoch,
'model_state_dict': vae.state_dict(),
'optimizer_state_dict': VAEoptim.state_dict()
}, CHECKPOINT_DIR+"VAEepoch_digits_"+str(epoch)+".pt")
############################################ inference - target projection ##############################################################
vae.eval()
data_transforms = transforms.Compose([transforms.RandomResizedCrop(size=IMAGE_SIZE), transforms.ToTensor()] )
test_data = DGdata(".", IMAGE_SIZE, [target | self.transform = transforms.ToTensor() | conditional_block |
digits_DG_Gphi_projection.py |
class ConvNet(Backbone):
def __init__(self, c_hidden=64):
super().__init__()
self.conv1 = Convolution(3, c_hidden)
self.conv2 = Convolution(c_hidden, c_hidden)
self.conv3 = Convolution(c_hidden, c_hidden)
self.conv4 = Convolution(c_hidden, c_hidden)
self._out_features = 2**2 * c_hidden
def _check_input(self, x):
H, W = x.shape[2:]
assert H == 32 and W == 32, 'Input to network must be 32x32, ' 'but got {}x{}'.format(H, W)
def forward(self, x):
self._check_input(x)
x = self.conv1(x)
x = F.max_pool2d(x, 2)
x = self.conv2(x)
x = F.max_pool2d(x, 2)
x = self.conv3(x)
x = F.max_pool2d(x, 2)
x = self.conv4(x)
x = F.max_pool2d(x, 2)
return x.view(x.size(0), -1)
class DGdata(Dataset):
def __init__(self, root_dir, image_size, domains=None, transform = None):
self.root_dir = root_dir
if root_dir[-1] != "/":
self.root_dir = self.root_dir + "/"
self.categories = ['0', '1', '2', '3', '4', '5', '6','7', '8', '9']
if domains is None:
self.domains = ["mnist", "mnist_m", "svhn", "syn"]
else:
self.domains = domains
if transform is None:
self.transform = transforms.ToTensor()
else:
self.transform = transform
# make a list of all the files in the root_dir
# and read the labels
self.img_files = []
self.labels = []
self.domain_labels = []
for domain in self.domains:
for category in self.categories:
for image in os.listdir(self.root_dir+domain+'/'+category):
self.img_files.append(image)
self.labels.append(self.categories.index(category))
self.domain_labels.append(self.domains.index(domain))
def __len__(self):
return len(self.img_files)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
img_path = self.root_dir + self.domains[self.domain_labels[idx]] + "/" + self.categories[self.labels[idx]] + "/" + self.img_files[idx]
image = PIL.Image.open(img_path)
label = self.labels[idx]
return self.transform(image), label
class Flatten(nn.Module):
def forward(self, input):
return input.view(input.size(0), -1)
class UnFlatten(nn.Module):
def forward(self, input, size=64):
return input.view(input.size(0), size, 1, 1)
class VAE_Digits(nn.Module):
def __init__(self, image_channels=1, h_dim=64, z_dim=32):
super(VAE_Digits, self).__init__()
self.encoder = nn.Sequential(
nn.Conv2d(image_channels, 4, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(4, 8, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
nn.Conv2d(8, 16, kernel_size=3, stride=2,padding=1),
nn.ReLU(),
Flatten()
)
self.fc1 = nn.Linear(h_dim, z_dim)
self.fc2 = nn.Linear(h_dim, z_dim)
self.fc3 = nn.Linear(z_dim, h_dim)
self.decoder = nn.Sequential(
UnFlatten(),
nn.ConvTranspose2d(h_dim, 16, kernel_size=2, stride=1),
nn.ReLU(),
nn.ConvTranspose2d(16, 8, kernel_size=2, stride=2),
nn.ReLU(),
nn.ConvTranspose2d(8, 4, kernel_size=2, stride=2),
nn.Sigmoid(),
)
def reparameterize(self, mu, logvar):
std = torch.exp(0.5*logvar)
eps = torch.randn_like(std)
z = mu + eps*std
return z
def bottleneck(self, h):
mu, logvar = self.fc1(h), self.fc2(h)
z = self.reparameterize(mu, logvar)
return z, mu, logvar
def encode(self, x):
x = x.view(-1, 1, 16,16)
h = self.encoder(x)
z, mu, logvar = self.bottleneck(h)
return z, mu, logvar
def decode(self, z):
z = self.fc3(z)
z = self.decoder(z)
return z.view(-1, 1, 16,16)
def forward(self, x):
z, mu, logvar = self.encode(x)
z = self.decode(z)
return z, mu, logvar
digits_fnet = ConvNet(c_hidden=64)
checkpoint = torch.load('../Models/digits_fnet.pt')
digits_fnet.load_state_dict(checkpoint['model_state_dict'])
digits_fnet = digits_fnet.to(dev)
layers = []
layers.append(nn.Linear(FEATURE_DIM, CLASSES))
classifier = torch.nn.Sequential(*layers).to(dev)
CELoss = nn.CrossEntropyLoss()
classifier = classifier.to(dev)
data_transforms = transforms.Compose([transforms.RandomResizedCrop(size=IMAGE_SIZE), transforms.ToTensor()] )
ds = DGdata(".", IMAGE_SIZE, [src_path], transform=data_transforms)
dataloader = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
digits_fnet.eval()
opt = torch.optim.Adam(classifier.parameters(), lr=0.003)
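# Only the linear classifier head is optimized here; digits_fnet stays in eval() mode and is not updated (the optimizer only receives classifier.parameters()).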
for epoch in range(15):
step_wise_loss = []
step_wise_accuracy = []
for image_batch, labels in (dataloader):
image_batch = image_batch.float()
if dev is not None:
image_batch, labels = image_batch.to(dev), labels.to(dev)
# zero the parameter gradients
opt.zero_grad()
z = digits_fnet(image_batch).to(dev)
pred = classifier(z)
loss = CELoss(pred, labels)
accuracy = (pred.argmax(dim=1) == labels).float().sum()/pred.shape[0]
loss.backward()
opt.step()
step_wise_loss.append(loss.detach().cpu().numpy())
step_wise_accuracy.append(accuracy.detach().cpu().numpy())
print("Epoch " + str(epoch) + " Loss " + str(np.mean(step_wise_loss)) + " Accuracy " + str(np.mean(step_wise_accuracy)))
vae = VAE_Digits().to(dev)
VAEoptim = LARS(torch.optim.SGD(vae.parameters(), lr=0.005))
dataloader_vae = DataLoader(ds, batch_size=64, shuffle=True, num_workers = 4)
#modified loss
def loss_function(recon_x, x, mu, logvar):
l2 = F.mse_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
l1 = F.l1_loss(recon_x, x.view(-1, 1, 16, 16), reduction='mean')
return l1 + l2 + KLD
def trainVAE(epoch):
vae.train()
train_loss = 0
print(epoch)
for batch_idx, (image_batch, _) in enumerate(dataloader_vae):
image_batch = image_batch.float()
image_batch = image_batch.to(dev)
VAEoptim.zero_grad()
h = digits_fnet(image_batch).to(dev)
#print(h.shape)
h = h.view(-1, 1, 16,16)
#print(h.shape)
h=h.detach()
recon_batch, mu, logvar = vae(h)
loss = loss_function(recon_batch, h, mu, logvar)
loss.backward()
train_loss += loss.item()
VAEoptim.step()
print('====> Epoch: {} Average loss: {:.4f}'.format(
epoch, train_loss / len(dataloader_vae.dataset)))
for epoch in range(1, 150):
trainVAE(epoch)
if (epoch)%10 == 0:
torch.save({'epoch' : epoch,
'model_state_dict': vae.state_dict(),
'optimizer_state_dict': VAEoptim.state_dict()
}, CHECKPOINT_DIR+"VAEepoch_digits_"+str(epoch)+".pt")
############################################ inference - target projection ##############################################################
vae.eval()
data_transforms = transforms.Compose([transforms.Random | def __init__(self, c_in, c_out):
super().__init__()
self.conv = nn.Conv2d(c_in, c_out, 3, stride=1, padding=1)
self.relu = nn.ReLU(True)
def forward(self, x):
return self.relu(self.conv(x)) | identifier_body |
|
script.js | -nav-parent-active')
panel.css('max-height',panel.prop('scrollHeight') + "px")
} else {
panel.attr('style','');
$(this).toggleClass('links-nav-parent-active')
}
}
})
//menu-script
$('.m-nav-icon').on('click',function(event){
$('.header-overley').addClass('show-overlay');
$('body').addClass('stop-scroll');
$('.m-nav-hiiden').addClass('m-nav-hiiden-active')
})
$('.close-menu-icon, .header-overley').on('click', function(){
$('.header-overley').removeClass('show-overlay');
$('body').removeClass('stop-scroll');
$('.m-nav-hiiden').removeClass('m-nav-hiiden-active')
})
//get scroll-width
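// Measures the scrollbar width: a hidden 100px-wide div with overflow:scroll minus its child's inner width; used later to pad the body/header when a modal locks scrolling.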
function getScrollBarWidth () {
var $outer = $('<div>').css({visibility: 'hidden', width: 100, overflow: 'scroll'}).appendTo('body'),
widthWithScroll = $('<div>').css({width: '100%'}).appendTo($outer).outerWidth();
$outer.remove();
return 100 - widthWithScroll;
};
//tabs
$(".tab-t").on("click",function(e){
$('.tab-t').removeClass('tab-t-active')
$(this).addClass('tab-t-active');
var activeWidth = $(this).innerWidth();
var itemPos = $(this).position();
$(".tab-t-selector").css({
"left":itemPos.left + "px",
"width": activeWidth + "px"
});
$('.offer').removeClass('active-tab');
$('[data-tab='+$(this).attr('data-select-tab')+']').addClass('active-tab');
})
if($('div').is(".tab-t-selector")){
$(".tab-t-selector").css({
"left":$('.tab-t-active').position().left + "px",
"width": $('.tab-t-active').innerWidth() + "px"
});
}
//price-tabs
$('.img-tab-c-th-img').on("click",function(e){
$(this).closest('.img-tab-c-th').next().find('img').attr('src',$(this).attr('data-full'))
$('.img-tab-c-th-img').removeClass('active-img-tab-c-th-img');
$(this).addClass('active-img-tab-c-th-img');
});
//menu
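// Desktop only: the header becomes fixed after 400px of scroll and un-fixes below 200px; the 350px checks add/remove the header-animated (slide-in) class.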
if($(window).width() > 992) {
$(window).scroll(function(){
if($(this).scrollTop()>400){
$('.site-header').addClass('fixed-header');
$('body').addClass('nav-fixed');
}
if ($(this).scrollTop()<350 && $('.site-header').hasClass('fixed-header')){
$('.site-header').addClass('header-animated');
}
if ($(this).scrollTop()>350 && $('.site-header').hasClass('header-animated')){
$('.site-header').removeClass('header-animated');
}
if ($(this).scrollTop() < 200){
$('.site-header').removeClass('fixed-header');
$('.site-header').removeClass('header-animated');
$('body').removeClass('nav-fixed');
}
});
}
//modal
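// hideModal waits 300ms (assumed to match the CSS hide transition) before restoring body scroll and the padding-right compensation.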
function hideModal(){
$('[data-modal]').removeClass('visible-modal');
$('.modal-overley').removeClass('modal-overley-show');
setTimeout(function(){
$('body').removeClass('stop-scroll');
$('body').css('padding-right',0+'px');
$('.site-header').css('padding-right',0+'px')
}, 300);
}
$(".close-modal, .modal-overley").on("click",function(e){
hideModal()
})
$(document).keydown(function(eventObject){
if (eventObject.which == 27)
hideModal()
});
$('[data-modal-open]').on("click",function(e){
e.preventDefault()
$('[data-modal='+ $(this).attr('data-modal-open') +']').addClass('visible-modal')
if($(this).attr('data-modal-open')==1){
$('[data-modal=1] .modal-header-t').text($(this).prev('.cb-c-text').find('.bloc-t').text());
$('[data-modal=1] .modal-hero-descr').text($(this).prev('.cb-c-text').find('.descr').text());
$('[data-modal=1] .modal-hero-img').attr('src',$(this).next('.cb-c-img').attr('src'));
}
if($(this).attr('data-modal-open')==3){
$('[data-modal=3] .centured-m-t').text($(this).parentsUntil('.hf-t').find('h1').text());
$('[data-modal=3] .centured-m-d').text($(this).parentsUntil('.hf-t').find('.hero-content p:first-child').text());
$('[data-modal=3] .main-request-img').attr('src',$('.hf-img img').attr('src'));
}
$('.modal-overley').addClass('modal-overley-show');
$('body').addClass('stop-scroll');
$('body').css('padding-right',getScrollBarWidth ()+'px');
$('.site-header').css('padding-right',getScrollBarWidth ()+'px');
})
// centured-modal-close
$('.centured-modal').on("click",function(e){
hideModal()
}).children()
.click(function(e){
e.stopPropagation();
})
| if($('div').is('.post-nav-container')){
if($(window).width() > 768){
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-links .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
//post-nav-active
$(window).scroll(function(){
var $sections = $('.sp-text-col h2,.sp-text-col h3,.sp-text-col h4');
$sections.each(function(i,el){
var top = $(el).offset().top;
console.log($(window).scrollTop())
var id = $(el).attr('id');
if( (top - $(window).scrollTop()) > 0 && (top - $(window).scrollTop()) < 120){
$('.post-nav-link-active').removeClass('post-nav-link-active');
$('a[href="#'+id+'"]').addClass('post-nav-link-active');
$(".post-nav-selector").css({
"top":$('a[href="#'+id+'"]').position().top + "px",
"height": $('a[href="#'+id+'"]').innerHeight() + "px"
});
}
})
});
}else{
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-mobile-nav .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
}
}
//like-button
$('.sp-like-button').on("click",function(e){
$(this).addClass('sp-like-button-liked');
$('.sp-l-text').text(Number.parseInt($('.sp-l-text').text())+1);
})
//quiz
var curentStage=1;
$('.cond-quiz-counter-curent').text(curentStage);
$('.cond-quiz-footer-next').prop('disabled',true)
$('.count-line-counter').css('width',(100 / $('.cond-quiz-body').length) + '%')
$('.c-check input').on("change",function(e){
if($('.c-check input:checked').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.c-q-c-c-item').on("click",function(e){
$(this).toggleClass('c-q-c-c-item-selected')
if($('.c-q-c-c-item-selected').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.cond-quiz-footer-next').on("click",function(e){
if(curentStage==1){
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--1]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--1]').val(values.replace(/\s/g, '')+',');
});
}
if(curentStage==2){
$('.c-q-c-c-item-selected').each(function(i) {
var values = $('[data-field-id=field--2]').val();
values += $(this).find('.c-q-c-c-item-descr').text();
$('[data-field-id=field--2]').val(values.replace(/\s/g, '')+',');
});
}
if(curentStage==3){
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--3]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--3]').val(values.replace(/\s/g, '')+',');
| //post-nav | random_line_split |
script.js | -parent-active')
panel.css('max-height',panel.prop('scrollHeight') + "px")
} else {
panel.attr('style','');
$(this).toggleClass('links-nav-parent-active')
}
}
})
//menu-script
$('.m-nav-icon').on('click',function(event){
$('.header-overley').addClass('show-overlay');
$('body').addClass('stop-scroll');
$('.m-nav-hiiden').addClass('m-nav-hiiden-active')
})
$('.close-menu-icon, .header-overley').on('click', function(){
$('.header-overley').removeClass('show-overlay');
$('body').removeClass('stop-scroll');
$('.m-nav-hiiden').removeClass('m-nav-hiiden-active')
})
//get scroll-width
function | () {
var $outer = $('<div>').css({visibility: 'hidden', width: 100, overflow: 'scroll'}).appendTo('body'),
widthWithScroll = $('<div>').css({width: '100%'}).appendTo($outer).outerWidth();
$outer.remove();
return 100 - widthWithScroll;
};
//tabs
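// Tabs: the sliding .tab-t-selector underline is moved to the clicked tab's position and width, and the pane whose data-tab matches data-select-tab is shown.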
$(".tab-t").on("click",function(e){
$('.tab-t').removeClass('tab-t-active')
$(this).addClass('tab-t-active');
var activeWidth = $(this).innerWidth();
var itemPos = $(this).position();
$(".tab-t-selector").css({
"left":itemPos.left + "px",
"width": activeWidth + "px"
});
$('.offer').removeClass('active-tab');
$('[data-tab='+$(this).attr('data-select-tab')+']').addClass('active-tab');
})
if($('div').is(".tab-t-selector")){
$(".tab-t-selector").css({
"left":$('.tab-t-active').position().left + "px",
"width": $('.tab-t-active').innerWidth() + "px"
});
}
//price-tabs
$('.img-tab-c-th-img').on("click",function(e){
$(this).closest('.img-tab-c-th').next().find('img').attr('src',$(this).attr('data-full'))
$('.img-tab-c-th-img').removeClass('active-img-tab-c-th-img');
$(this).addClass('active-img-tab-c-th-img');
});
//menu
if($(window).width() > 992) {
$(window).scroll(function(){
if($(this).scrollTop()>400){
$('.site-header').addClass('fixed-header');
$('body').addClass('nav-fixed');
}
if ($(this).scrollTop()<350 && $('.site-header').hasClass('fixed-header')){
$('.site-header').addClass('header-animated');
}
if ($(this).scrollTop()>350 && $('.site-header').hasClass('header-animated')){
$('.site-header').removeClass('header-animated');
}
if ($(this).scrollTop() < 200){
$('.site-header').removeClass('fixed-header');
$('.site-header').removeClass('header-animated');
$('body').removeClass('nav-fixed');
}
});
}
//modal
function hideModal(){
$('[data-modal]').removeClass('visible-modal');
$('.modal-overley').removeClass('modal-overley-show');
setTimeout(function(){
$('body').removeClass('stop-scroll');
$('body').css('padding-right',0+'px');
$('.site-header').css('padding-right',0+'px')
}, 300);
}
$(".close-modal, .modal-overley").on("click",function(e){
hideModal()
})
$(document).keydown(function(eventObject){
if (eventObject.which == 27)
hideModal()
});
$('[data-modal-open]').on("click",function(e){
e.preventDefault()
$('[data-modal='+ $(this).attr('data-modal-open') +']').addClass('visible-modal')
if($(this).attr('data-modal-open')==1){
$('[data-modal=1] .modal-header-t').text($(this).prev('.cb-c-text').find('.bloc-t').text());
$('[data-modal=1] .modal-hero-descr').text($(this).prev('.cb-c-text').find('.descr').text());
$('[data-modal=1] .modal-hero-img').attr('src',$(this).next('.cb-c-img').attr('src'));
}
if($(this).attr('data-modal-open')==3){
$('[data-modal=3] .centured-m-t').text($(this).parentsUntil('.hf-t').find('h1').text());
$('[data-modal=3] .centured-m-d').text($(this).parentsUntil('.hf-t').find('.hero-content p:first-child').text());
$('[data-modal=3] .main-request-img').attr('src',$('.hf-img img').attr('src'));
}
$('.modal-overley').addClass('modal-overley-show');
$('body').addClass('stop-scroll');
$('body').css('padding-right',getScrollBarWidth ()+'px');
$('.site-header').css('padding-right',getScrollBarWidth ()+'px');
})
// centured-modal-close
$('.centured-modal').on("click",function(e){
hideModal()
}).children()
.click(function(e){
e.stopPropagation();
})
//post-nav
if($('div').is('.post-nav-container')){
if($(window).width() > 768){
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-links .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
//post-nav-active
$(window).scroll(function(){
var $sections = $('.sp-text-col h2,.sp-text-col h3,.sp-text-col h4');
$sections.each(function(i,el){
var top = $(el).offset().top;
console.log($(window).scrollTop())
var id = $(el).attr('id');
if( (top - $(window).scrollTop()) > 0 && (top - $(window).scrollTop()) < 120){
$('.post-nav-link-active').removeClass('post-nav-link-active');
$('a[href="#'+id+'"]').addClass('post-nav-link-active');
$(".post-nav-selector").css({
"top":$('a[href="#'+id+'"]').position().top + "px",
"height": $('a[href="#'+id+'"]').innerHeight() + "px"
});
}
})
});
}else{
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-mobile-nav .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
}
}
//like-button
$('.sp-like-button').on("click",function(e){
$(this).addClass('sp-like-button-liked');
$('.sp-l-text').text(Number.parseInt($('.sp-l-text').text())+1);
})
//quiz
var curentStage=1;
$('.cond-quiz-counter-curent').text(curentStage);
$('.cond-quiz-footer-next').prop('disabled',true)
$('.count-line-counter').css('width',(100 / $('.cond-quiz-body').length) + '%')
$('.c-check input').on("change",function(e){
if($('.c-check input:checked').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.c-q-c-c-item').on("click",function(e){
$(this).toggleClass('c-q-c-c-item-selected')
if($('.c-q-c-c-item-selected').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.cond-quiz-footer-next').on("click",function(e){
if(curentStage==1){
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--1]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--1]').val(values.replace(/\s/g, '')+',');
});
}
if(curentStage==2){
$('.c-q-c-c-item-selected').each(function(i) {
var values = $('[data-field-id=field--2]').val();
values += $(this).find('.c-q-c-c-item-descr').text();
$('[data-field-id=field--2]').val(values.replace(/\s/g, '')+',');
});
}
if(curentStage==3){
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--3]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--3]').val(values.replace(/\s/g, '')+',');
| getScrollBarWidth | identifier_name |
script.js | -parent-active')
panel.css('max-height',panel.prop('scrollHeight') + "px")
} else {
panel.attr('style','');
$(this).toggleClass('links-nav-parent-active')
}
}
})
//menu-script
$('.m-nav-icon').on('click',function(event){
$('.header-overley').addClass('show-overlay');
$('body').addClass('stop-scroll');
$('.m-nav-hiiden').addClass('m-nav-hiiden-active')
})
$('.close-menu-icon, .header-overley').on('click', function(){
$('.header-overley').removeClass('show-overlay');
$('body').removeClass('stop-scroll');
$('.m-nav-hiiden').removeClass('m-nav-hiiden-active')
})
//get scroll-width
function getScrollBarWidth () {
var $outer = $('<div>').css({visibility: 'hidden', width: 100, overflow: 'scroll'}).appendTo('body'),
widthWithScroll = $('<div>').css({width: '100%'}).appendTo($outer).outerWidth();
$outer.remove();
return 100 - widthWithScroll;
};
//tabs
$(".tab-t").on("click",function(e){
$('.tab-t').removeClass('tab-t-active')
$(this).addClass('tab-t-active');
var activeWidth = $(this).innerWidth();
var itemPos = $(this).position();
$(".tab-t-selector").css({
"left":itemPos.left + "px",
"width": activeWidth + "px"
});
$('.offer').removeClass('active-tab');
$('[data-tab='+$(this).attr('data-select-tab')+']').addClass('active-tab');
})
if($('div').is(".tab-t-selector")){
$(".tab-t-selector").css({
"left":$('.tab-t-active').position().left + "px",
"width": $('.tab-t-active').innerWidth() + "px"
});
}
//price-tabs
$('.img-tab-c-th-img').on("click",function(e){
$(this).closest('.img-tab-c-th').next().find('img').attr('src',$(this).attr('data-full'))
$('.img-tab-c-th-img').removeClass('active-img-tab-c-th-img');
$(this).addClass('active-img-tab-c-th-img');
});
//menu
if($(window).width() > 992) {
$(window).scroll(function(){
if($(this).scrollTop()>400){
$('.site-header').addClass('fixed-header');
$('body').addClass('nav-fixed');
}
if ($(this).scrollTop()<350 && $('.site-header').hasClass('fixed-header')){
$('.site-header').addClass('header-animated');
}
if ($(this).scrollTop()>350 && $('.site-header').hasClass('header-animated')){
$('.site-header').removeClass('header-animated');
}
if ($(this).scrollTop() < 200){
$('.site-header').removeClass('fixed-header');
$('.site-header').removeClass('header-animated');
$('body').removeClass('nav-fixed');
}
});
}
//modal
function hideModal() |
$(".close-modal, .modal-overley").on("click",function(e){
hideModal()
})
$(document).keydown(function(eventObject){
if (eventObject.which == 27)
hideModal()
});
$('[data-modal-open]').on("click",function(e){
e.preventDefault()
$('[data-modal='+ $(this).attr('data-modal-open') +']').addClass('visible-modal')
if($(this).attr('data-modal-open')==1){
$('[data-modal=1] .modal-header-t').text($(this).prev('.cb-c-text').find('.bloc-t').text());
$('[data-modal=1] .modal-hero-descr').text($(this).prev('.cb-c-text').find('.descr').text());
$('[data-modal=1] .modal-hero-img').attr('src',$(this).next('.cb-c-img').attr('src'));
}
if($(this).attr('data-modal-open')==3){
$('[data-modal=3] .centured-m-t').text($(this).parentsUntil('.hf-t').find('h1').text());
$('[data-modal=3] .centured-m-d').text($(this).parentsUntil('.hf-t').find('.hero-content p:first-child').text());
$('[data-modal=3] .main-request-img').attr('src',$('.hf-img img').attr('src'));
}
$('.modal-overley').addClass('modal-overley-show');
$('body').addClass('stop-scroll');
$('body').css('padding-right',getScrollBarWidth ()+'px');
$('.site-header').css('padding-right',getScrollBarWidth ()+'px');
})
// centured-modal-close
$('.centured-modal').on("click",function(e){
hideModal()
}).children()
.click(function(e){
e.stopPropagation();
})
//post-nav
if($('div').is('.post-nav-container')){
if($(window).width() > 768){
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-links .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
//post-nav-active
$(window).scroll(function(){
var $sections = $('.sp-text-col h2,.sp-text-col h3,.sp-text-col h4');
$sections.each(function(i,el){
var top = $(el).offset().top;
console.log($(window).scrollTop())
var id = $(el).attr('id');
if( (top - $(window).scrollTop()) > 0 && (top - $(window).scrollTop()) < 120){
$('.post-nav-link-active').removeClass('post-nav-link-active');
$('a[href="#'+id+'"]').addClass('post-nav-link-active');
$(".post-nav-selector").css({
"top":$('a[href="#'+id+'"]').position().top + "px",
"height": $('a[href="#'+id+'"]').innerHeight() + "px"
});
}
})
});
}else{
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-mobile-nav .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
}
}
//like-button
$('.sp-like-button').on("click",function(e){
$(this).addClass('sp-like-button-liked');
$('.sp-l-text').text(Number.parseInt($('.sp-l-text').text())+1);
})
//quiz
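// Quiz flow: the Next button stays disabled until an answer is selected on the current stage; chosen answers are appended to the matching field--N input before advancing.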
var curentStage=1;
$('.cond-quiz-counter-curent').text(curentStage);
$('.cond-quiz-footer-next').prop('disabled',true)
$('.count-line-counter').css('width',(100 / $('.cond-quiz-body').length) + '%')
$('.c-check input').on("change",function(e){
if($('.c-check input:checked').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.c-q-c-c-item').on("click",function(e){
$(this).toggleClass('c-q-c-c-item-selected')
if($('.c-q-c-c-item-selected').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.cond-quiz-footer-next').on("click",function(e){
if(curentStage==1){
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--1]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--1]').val(values.replace(/\s/g, '')+',');
});
}
if(curentStage==2){
$('.c-q-c-c-item-selected').each(function(i) {
var values = $('[data-field-id=field--2]').val();
values += $(this).find('.c-q-c-c-item-descr').text();
$('[data-field-id=field--2]').val(values.replace(/\s/g, '')+',');
});
}
if(curentStage==3){
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--3]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--3]').val(values.replace(/\s/g, '')+', | {
$('[data-modal]').removeClass('visible-modal');
$('.modal-overley').removeClass('modal-overley-show');
setTimeout(function(){
$('body').removeClass('stop-scroll');
$('body').css('padding-right',0+'px');
$('.site-header').css('padding-right',0+'px')
}, 300);
} | identifier_body |
script.js | -parent-active')
panel.css('max-height',panel.prop('scrollHeight') + "px")
} else {
panel.attr('style','');
$(this).toggleClass('links-nav-parent-active')
}
}
})
//menu-script
$('.m-nav-icon').on('click',function(event){
$('.header-overley').addClass('show-overlay');
$('body').addClass('stop-scroll');
$('.m-nav-hiiden').addClass('m-nav-hiiden-active')
})
$('.close-menu-icon, .header-overley').on('click', function(){
$('.header-overley').removeClass('show-overlay');
$('body').removeClass('stop-scroll');
$('.m-nav-hiiden').removeClass('m-nav-hiiden-active')
})
//get scroll-width
function getScrollBarWidth () {
var $outer = $('<div>').css({visibility: 'hidden', width: 100, overflow: 'scroll'}).appendTo('body'),
widthWithScroll = $('<div>').css({width: '100%'}).appendTo($outer).outerWidth();
$outer.remove();
return 100 - widthWithScroll;
};
//tabs
$(".tab-t").on("click",function(e){
$('.tab-t').removeClass('tab-t-active')
$(this).addClass('tab-t-active');
var activeWidth = $(this).innerWidth();
var itemPos = $(this).position();
$(".tab-t-selector").css({
"left":itemPos.left + "px",
"width": activeWidth + "px"
});
$('.offer').removeClass('active-tab');
$('[data-tab='+$(this).attr('data-select-tab')+']').addClass('active-tab');
})
if($('div').is(".tab-t-selector")){
$(".tab-t-selector").css({
"left":$('.tab-t-active').position().left + "px",
"width": $('.tab-t-active').innerWidth() + "px"
});
}
//price-tabs
$('.img-tab-c-th-img').on("click",function(e){
$(this).closest('.img-tab-c-th').next().find('img').attr('src',$(this).attr('data-full'))
$('.img-tab-c-th-img').removeClass('active-img-tab-c-th-img');
$(this).addClass('active-img-tab-c-th-img');
});
//menu
if($(window).width() > 992) {
$(window).scroll(function(){
if($(this).scrollTop()>400){
$('.site-header').addClass('fixed-header');
$('body').addClass('nav-fixed');
}
if ($(this).scrollTop()<350 && $('.site-header').hasClass('fixed-header')){
$('.site-header').addClass('header-animated');
}
if ($(this).scrollTop()>350 && $('.site-header').hasClass('header-animated')){
$('.site-header').removeClass('header-animated');
}
if ($(this).scrollTop() < 200){
$('.site-header').removeClass('fixed-header');
$('.site-header').removeClass('header-animated');
$('body').removeClass('nav-fixed');
}
});
}
//modal
function hideModal(){
$('[data-modal]').removeClass('visible-modal');
$('.modal-overley').removeClass('modal-overley-show');
setTimeout(function(){
$('body').removeClass('stop-scroll');
$('body').css('padding-right',0+'px');
$('.site-header').css('padding-right',0+'px')
}, 300);
}
$(".close-modal, .modal-overley").on("click",function(e){
hideModal()
})
$(document).keydown(function(eventObject){
if (eventObject.which == 27)
hideModal()
});
$('[data-modal-open]').on("click",function(e){
e.preventDefault()
$('[data-modal='+ $(this).attr('data-modal-open') +']').addClass('visible-modal')
if($(this).attr('data-modal-open')==1){
$('[data-modal=1] .modal-header-t').text($(this).prev('.cb-c-text').find('.bloc-t').text());
$('[data-modal=1] .modal-hero-descr').text($(this).prev('.cb-c-text').find('.descr').text());
$('[data-modal=1] .modal-hero-img').attr('src',$(this).next('.cb-c-img').attr('src'));
}
if($(this).attr('data-modal-open')==3){
$('[data-modal=3] .centured-m-t').text($(this).parentsUntil('.hf-t').find('h1').text());
$('[data-modal=3] .centured-m-d').text($(this).parentsUntil('.hf-t').find('.hero-content p:first-child').text());
$('[data-modal=3] .main-request-img').attr('src',$('.hf-img img').attr('src'));
}
$('.modal-overley').addClass('modal-overley-show');
$('body').addClass('stop-scroll');
$('body').css('padding-right',getScrollBarWidth ()+'px');
$('.site-header').css('padding-right',getScrollBarWidth ()+'px');
})
// centured-modal-close
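// Clicking the modal backdrop closes it; stopPropagation on the children keeps clicks inside the modal content from bubbling up and triggering hideModal().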
$('.centured-modal').on("click",function(e){
hideModal()
}).children()
.click(function(e){
e.stopPropagation();
})
//post-nav
if($('div').is('.post-nav-container')){
if($(window).width() > 768){
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-links .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
//post-nav-active
$(window).scroll(function(){
var $sections = $('.sp-text-col h2,.sp-text-col h3,.sp-text-col h4');
$sections.each(function(i,el){
var top = $(el).offset().top;
console.log($(window).scrollTop())
var id = $(el).attr('id');
if( (top - $(window).scrollTop()) > 0 && (top - $(window).scrollTop()) < 120){
$('.post-nav-link-active').removeClass('post-nav-link-active');
$('a[href="#'+id+'"]').addClass('post-nav-link-active');
$(".post-nav-selector").css({
"top":$('a[href="#'+id+'"]').position().top + "px",
"height": $('a[href="#'+id+'"]').innerHeight() + "px"
});
}
})
});
}else{
$(".sp-text-col h2,.sp-text-col h3,.sp-text-col h4").each(function(i) {
var current = $(this);
current.attr("id", "title" + i);
$(".sp-mobile-nav .post-nav-container").append("<a calass='post-nav-link' id='link" + i + "' href='#title" + i + "' title='" + $(this).text() + "' data-anchor='" + true + "'>" + current.html() + "</a>");
});
}
}
//like-button
$('.sp-like-button').on("click",function(e){
$(this).addClass('sp-like-button-liked');
$('.sp-l-text').text(Number.parseInt($('.sp-l-text').text())+1);
})
//quiz
var curentStage=1;
$('.cond-quiz-counter-curent').text(curentStage);
$('.cond-quiz-footer-next').prop('disabled',true)
$('.count-line-counter').css('width',(100 / $('.cond-quiz-body').length) + '%')
$('.c-check input').on("change",function(e){
if($('.c-check input:checked').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.c-q-c-c-item').on("click",function(e){
$(this).toggleClass('c-q-c-c-item-selected')
if($('.c-q-c-c-item-selected').length > 0){
$('.cond-quiz-footer-next').prop('disabled',false)
}
else{
$('.cond-quiz-footer-next').prop('disabled',true)
}
})
$('.cond-quiz-footer-next').on("click",function(e){
if(curentStage==1) |
if(curentStage==2){
$('.c-q-c-c-item-selected').each(function(i) {
var values = $('[data-field-id=field--2]').val();
values += $(this).find('.c-q-c-c-item-descr').text();
$('[data-field-id=field--2]').val(values.replace(/\s/g, '')+',');
});
}
if(curentStage==3){
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--3]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--3]').val(values.replace(/\s/g, '')+', | {
$('[data-stage='+curentStage+'] .c-check input:checked').each(function(i) {
var values = $('[data-field-id=field--1]').val();
values += $(this).parent('.c-check').text();
$('[data-field-id=field--1]').val(values.replace(/\s/g, '')+',');
});
} | conditional_block |
CORAL_train.py | ", "", "")
flags.DEFINE_integer('n_test', 10000, 'Number of test images')
flags.DEFINE_string('test_txt', '', 'Test text(label) path')
flags.DEFINE_string('test_img', '', 'Test images path')
flags.DEFINE_string("output_loss_txt", "/yuwhan/Edisk/yuwhan/Edisk/4th_paper/age_banchmark/UTK/loss_CORAL.txt", "")
FLAGS = flags.FLAGS
FLAGS(sys.argv)
optimizer = tf.keras.optimizers.Adam(FLAGS.lr,beta_1=0.9, beta_2=0.99)
def _func(filename, label):
image_string = tf.io.read_file(filename)
decode_image = tf.image.decode_jpeg(image_string, channels=3)
decode_image = tf.image.resize(decode_image, [FLAGS.img_size - 8, FLAGS.img_size - 8]) / 255.
#decode_image = tf.image.random_crop(decode_image, [FLAGS.img_size - 8, FLAGS.img_size - 8, 3])
if random() > 0.5:
decode_image = tf.image.flip_left_right(decode_image)
#decode_image = tf.image.per_image_standardization(decode_image)
label = label - 16
one_hot = tf.one_hot(label, FLAGS.num_classes)
return decode_image, one_hot, label
def val_func(name, label):
image_string = tf.io.read_file(name)
decode_image = tf.image.decode_jpeg(image_string, channels=3)
decode_image = tf.image.resize(decode_image, [FLAGS.img_size - 8, FLAGS.img_size - 8]) / 255.
#decode_image = tf.image.per_image_standardization(decode_image)
label = int(label) - 16
one_hot = tf.one_hot(label, FLAGS.num_classes)
return decode_image, one_hot
#@tf.function
def run_model(model, images):
|
@tf.function
def train_step(model, images, levels, imp):
with tf.GradientTape() as tape:
logits, probs = run_model(model, images)
#total_loss = (-tf.reduce_sum((tf.nn.log_softmax(logits, axis=2)[:,:,1]*levels + tf.nn.log_softmax(logits, axis=2)[:,:,0]*(1-levels))*imp, 1))
# total_loss = (-tf.reduce_sum( (tf.math.log_sigmoid(logits)*levels + tf.math.log(1. - tf.nn.sigmoid(logits))*(1-levels))*imp, 1))
total_loss = (-tf.reduce_sum( (tf.math.log_sigmoid(logits)*levels + (tf.math.log_sigmoid(logits) - logits)*(1-levels))*imp, 1))
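# Stable form of the CORAL loss: log(1 - sigmoid(x)) is rewritten as log_sigmoid(x) - x, avoiding a log of a saturated sigmoid.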
#total_loss = -tf.reduce_sum((tf.math.log(tf.nn.softmax(logits, 2)[:, :, 1] + 1e-7) * levels \
# + tf.math.log(tf.nn.softmax(logits, 2)[:, :, 0] + 1e-7) * (1 - levels)) * imp, 1)
total_loss = tf.reduce_mean(total_loss)
gradients = tape.gradient(total_loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
return total_loss
def task_importance_weights(data):
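# Per-threshold importance weights as in the CORAL paper: weight_t = sqrt(max(#{y > t}, N - #{y > t})), normalized by the largest weight; main() keeps only the first num_classes-1 entries.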
label = np.array(data).astype(np.float32)
num_examples = label.size
y = np.unique(label)
m = np.zeros(label.shape)
for i, t in enumerate(np.arange(np.min(y), np.max(y))):
m_k = np.max([label[label > t].size,
num_examples - label[label > t].size])
#print(m_k)
m_k = tf.cast(tf.convert_to_tensor(m_k), tf.float32)
m[i] = tf.sqrt(m_k)
# m[i] = float(m_k)**(0.5)
max_ = np.max(m)
imp = tf.cast(m / max_, tf.float32)
#print(imp)
return imp
@tf.function
def test_MAE(model, images, labels):
logits, probs = model(images, training=False)
predict = probs > 0.5
predict = tf.cast(predict, tf.float32)
pre_age = tf.reduce_sum(predict, 1)
grd_age = tf.argmax(labels, 1) + 1
grd_age = tf.cast(grd_age, tf.float32)
AE = tf.reduce_sum(tf.math.abs(grd_age - pre_age))
return AE
def make_levels(labels):
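# CORAL ordinal targets: label k -> [1]*k + [0]*(num_classes-1-k), e.g. with num_classes=5 a label of 2 becomes [1, 1, 0, 0].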
levels = []
for i in range(FLAGS.batch_size):
l = [1] * (labels[i].numpy()) + [0]*(FLAGS.num_classes - 1 - labels[i].numpy())
l = tf.cast(l, tf.float32)
levels.append(l)
return tf.convert_to_tensor(levels, tf.float32)
def main(argv=None):
# train_model = resnet_type1(input_shape=(FLAGS.img_size - 8, FLAGS.img_size - 8, 3), NUM_CLASSES=FLAGS.num_classes)
train_model = ResNet34(input_shape=(FLAGS.img_size - 8, FLAGS.img_size - 8, FLAGS.ch), include_top=False,
batch_size=FLAGS.batch_size, weight_path=FLAGS.weights, weights='imagenet')
regularizer = tf.keras.regularizers.l2(0.000005)
initializer = tf.keras.initializers.glorot_normal()
for layer in train_model.layers:
for attr in ['kernel_regularizer']:
if hasattr(layer, attr):
setattr(layer, attr, regularizer)
# for attr_ in ["kernel_initializer"]:
# if hasattr(layer, attr_):
# setattr(layer, attr_, initializer)
x = train_model.output
avgpool = tf.keras.layers.GlobalAveragePooling2D()(x)
# avgpool = tf.reshape(avgpool, [avgpool.shape[0], -1])
# fc = tf.keras.layers.Dense(1, use_bias=False)(avgpool)
# logits = Linear(NUM_CLASSES - 1)(fc)
logits = tf.keras.layers.Dense(FLAGS.num_classes-1, use_bias=False)(avgpool)
logits = Linear(FLAGS.num_classes - 1)(logits)
probs = tf.nn.sigmoid(logits)
train_model = tf.keras.Model(inputs=train_model.input, outputs=[logits, probs])
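# Head: GAP -> bias-free Dense(num_classes-1) -> Linear (presumably the CORAL layer adding the K-1 threshold biases) -> sigmoid, one P(age > t_k) per threshold; the commented lines above show the original CORAL variant with a single shared logit (Dense(1, use_bias=False)).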
train_model.summary()
#for m in train_model.layers:
# if isinstance(m, tf.keras.layers.Conv2D):
# a = m.output_mask
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, (2. / n)**.5)
# elif isinstance(m, tf.keras.layers.BatchNormalization):
# m.get_weights
# m.weight.data.fill_(1)
# m.bias.data.zero_()
if FLAGS.pre_checkpoint is True:
ckpt = tf.train.Checkpoint(train_model=train_model, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(ckpt, FLAGS.pre_checkpoint_path, max_to_keep=5)
# if a checkpoint exists, restore the latest checkpoint.
if ckpt_manager.latest_checkpoint:
print(ckpt_manager.latest_checkpoint)
ckpt.restore(ckpt_manager.latest_checkpoint)
print ('Latest checkpoint restored!!')
if FLAGS.train == True:
data_name = np.loadtxt(FLAGS.txt_path, dtype='<U100', skiprows=0, usecols=0)
data_name = [FLAGS.img_path + data_name_ for data_name_ in data_name]
data_label = np.loadtxt(FLAGS.txt_path, dtype=np.int32, skiprows=0, usecols=1)
imp = task_importance_weights(data_label-16)
imp = imp[0:FLAGS.num_classes-1]
val_data_name = np.loadtxt(FLAGS.val_txt_path, dtype='<U100', skiprows=0, usecols=[0, 1, 2, 3])
print(len(val_data_name))
WM_img, WM_age = [], []
WF_img, WF_age = [], []
BM_img, BM_age = [], []
BF_img, BF_age = [], []
for i in range(len(val_data_name)):
if val_data_name[i][2] == "M" and val_data_name[i][3] == "W":
WM_img.append(FLAGS.val_img_path + val_data_name[i][0])
WM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "W":
WF_img.append(FLAGS.val_img_path + val_data_name[i][0])
WF_age.append(val_data_name[i][1])
if val_data_name[i][2] == "M" and val_data_name[i][3] == "B":
BM_img.append(FLAGS.val_img_path + val_data_name[i][0])
BM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "B":
BF_img.append(FLAGS.val_img_path + val_data_name[i][0])
BF_age.append(val_data_name[i][1])
print(len(WM_img), len(WF_img), len(BM_img), len(BF_img))
WM_img, WM_age = np.array(WM | logits, probs = model(images, training=True)
return logits, probs | identifier_body |
CORAL_train.py | .000005)
initializer = tf.keras.initializers.glorot_normal()
for layer in train_model.layers:
for attr in ['kernel_regularizer']:
if hasattr(layer, attr):
setattr(layer, attr, regularizer)
# for attr_ in ["kernel_initializer"]:
# if hasattr(layer, attr_):
# setattr(layer, attr_, initializer)
x = train_model.output
avgpool = tf.keras.layers.GlobalAveragePooling2D()(x)
# avgpool = tf.reshape(avgpool, [avgpool.shape[0], -1])
# fc = tf.keras.layers.Dense(1, use_bias=False)(avgpool)
# logits = Linear(NUM_CLASSES - 1)(fc)
logits = tf.keras.layers.Dense(FLAGS.num_classes-1, use_bias=False)(avgpool)
logits = Linear(FLAGS.num_classes - 1)(logits)
probs = tf.nn.sigmoid(logits)
train_model = tf.keras.Model(inputs=train_model.input, outputs=[logits, probs])
train_model.summary()
#for m in train_model.layers:
# if isinstance(m, tf.keras.layers.Conv2D):
# a = m.output_mask
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, (2. / n)**.5)
# elif isinstance(m, tf.keras.layers.BatchNormalization):
# m.get_weights
# m.weight.data.fill_(1)
# m.bias.data.zero_()
if FLAGS.pre_checkpoint is True:
ckpt = tf.train.Checkpoint(train_model=train_model, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(ckpt, FLAGS.pre_checkpoint_path, max_to_keep=5)
# if a checkpoint exists, restore the latest checkpoint.
if ckpt_manager.latest_checkpoint:
print(ckpt_manager.latest_checkpoint)
ckpt.restore(ckpt_manager.latest_checkpoint)
print ('Latest checkpoint restored!!')
if FLAGS.train == True:
data_name = np.loadtxt(FLAGS.txt_path, dtype='<U100', skiprows=0, usecols=0)
data_name = [FLAGS.img_path + data_name_ for data_name_ in data_name]
data_label = np.loadtxt(FLAGS.txt_path, dtype=np.int32, skiprows=0, usecols=1)
imp = task_importance_weights(data_label-16)
imp = imp[0:FLAGS.num_classes-1]
val_data_name = np.loadtxt(FLAGS.val_txt_path, dtype='<U100', skiprows=0, usecols=[0, 1, 2, 3])
print(len(val_data_name))
WM_img, WM_age = [], []
WF_img, WF_age = [], []
BM_img, BM_age = [], []
BF_img, BF_age = [], []
for i in range(len(val_data_name)):
if val_data_name[i][2] == "M" and val_data_name[i][3] == "W":
WM_img.append(FLAGS.val_img_path + val_data_name[i][0])
WM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "W":
WF_img.append(FLAGS.val_img_path + val_data_name[i][0])
WF_age.append(val_data_name[i][1])
if val_data_name[i][2] == "M" and val_data_name[i][3] == "B":
BM_img.append(FLAGS.val_img_path + val_data_name[i][0])
BM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "B":
BF_img.append(FLAGS.val_img_path + val_data_name[i][0])
BF_age.append(val_data_name[i][1])
print(len(WM_img), len(WF_img), len(BM_img), len(BF_img))
WM_img, WM_age = np.array(WM_img), np.array(WM_age)
WF_img, WF_age = np.array(WF_img), np.array(WF_age)
BM_img, BM_age = np.array(BM_img), np.array(BM_age)
BF_img, BF_age = np.array(BF_img), np.array(BF_age)
all_val_list = [[WM_img, WM_age], [WF_img, WF_age], [BM_img, BM_age], [BF_img, BF_age]]
batch_idx = len(data_label) // FLAGS.batch_size
#current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
#train_log_dir = FLAGS.graphs + current_time + '/train'
#val_log_dir = FLAGS.graphs + current_time + '/val'
#train_summary_writer = tf.summary.create_file_writer(train_log_dir)
#val_summary_writer = tf.summary.create_file_writer(val_log_dir)
loss_f = open(FLAGS.output_loss_txt, "w")
count = 0
for epoch in range(FLAGS.epochs):
A = list(zip(data_name, data_label))
shuffle(A)
data_name, data_label = zip(*A)
data_name = np.array(data_name)
data_label = np.array(data_label)
data_generator = tf.data.Dataset.from_tensor_slices((data_name, data_label))
data_generator = data_generator.shuffle(len(data_name))
data_generator = data_generator.map(_func)
data_generator = data_generator.batch(FLAGS.batch_size)
data_generator = data_generator.prefetch(tf.data.experimental.AUTOTUNE)
it = iter(data_generator)
#imp = task_importance_weights(data_label)
#imp = imp[0:FLAGS.num_classes-1]
for step in range(batch_idx):
batch_images, batch_labels, age = next(it)
levels = make_levels(age)
total_loss = train_step(train_model, batch_images, levels, imp)
#with val_summary_writer.as_default():
# tf.summary.scalar(u'total loss', loss, step=count)
if count % 10 == 0:
#MAE = test_MAE(train_model, batch_images, batch_labels, levels)
print('Epoch: {} [{}/{}] loss = {}'.format(epoch + 1, step + 1, batch_idx, total_loss))
if count % 100 == 0:
test_list = ["WM", "WF", "BM", "BF"]
for j in range(len(all_val_list)):
val_img, val_lab = all_val_list[j]
val_data_generator = tf.data.Dataset.from_tensor_slices((val_img, val_lab))
val_data_generator = val_data_generator.map(val_func)
val_data_generator = val_data_generator.batch(1)
val_data_generator = val_data_generator.prefetch(tf.data.experimental.AUTOTUNE)
val_idx = len(val_img) // 1
val_it = iter(val_data_generator)
AE = 0
for i in range(val_idx):
img, lab = next(val_it)
pre_age = test_MAE(train_model, img, lab)
AE += pre_age
print("MAE = {} ({})".format(AE / len(val_img), test_list[j]))
loss_f.write("Epochs: {}, step = {}".format(epoch, count))
loss_f.write(" --> ")
loss_f.write(test_list[j])
loss_f.write(": ")
loss_f.write(str(AE / len(val_img)))
loss_f.write(", ")
loss_f.write("\n")
loss_f.flush()
# print("==========")
# print("[2]MAE = {}".format(MAE))
# print("==========")
# model_dir = FLAGS.save_checkpoint
# folder_name = int((count + 1)/val_idx)
# folder_name_str = "%s/%s" % (model_dir, folder_name)
# if not os.path.isdir(folder_name_str):
# print("Make {} folder to save checkpoint".format(folder_name))
# os.makedirs(folder_name_str)
# ckpt = tf.train.Checkpoint(train_model=train_model, optimizer=optimizer)
# checkpoint_dir = folder_name_str + "/" + "CORAL_{}_steps.ckpt".format(count)
# ckpt_manager = tf.train.CheckpointManager(ckpt, checkpoint_dir, 5)
# ckpt_manager.save()
# with val_summary_writer.as_default():
# tf.summary.scalar(u'[2]MAE', MAE, step=count)
count += 1
else:
| data_name = np.loadtxt(FLAGS.test_txt, dtype='<U100', skiprows=0, usecols=0)
data_name = [FLAGS.test_img + data_name_ for data_name_ in data_name]
data_label = np.loadtxt(FLAGS.test_txt, dtype=np.int32, skiprows=0, usecols=1)
data_generator = tf.data.Dataset.from_tensor_slices((data_name, data_label))
data_generator = data_generator.shuffle(len(data_name))
data_generator = data_generator.map(_func)
data_generator = data_generator.batch(1)
data_generator = data_generator.prefetch(tf.data.experimental.AUTOTUNE)
MAE = 0
it = iter(data_generator)
for i in range(FLAGS.n_test):
image, labels, opp_labels = next(it)
_, probs = train_model(image, training=False)
predict = probs > 0.5
predict = tf.cast(predict, tf.float32) | conditional_block |
|
CORAL_train.py | ", "", "")
flags.DEFINE_integer('n_test', 10000, 'Number of test images')
flags.DEFINE_string('test_txt', '', 'Test text(label) path')
flags.DEFINE_string('test_img', '', 'Test images path')
flags.DEFINE_string("output_loss_txt", "/yuwhan/Edisk/yuwhan/Edisk/4th_paper/age_banchmark/UTK/loss_CORAL.txt", "")
FLAGS = flags.FLAGS
FLAGS(sys.argv)
optimizer = tf.keras.optimizers.Adam(FLAGS.lr,beta_1=0.9, beta_2=0.99)
def _func(filename, label):
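# Training-time preprocessing: resize, random horizontal flip, and a label shift of -16 (presumably the minimum age in the dataset) so the youngest age maps to class index 0.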
image_string = tf.io.read_file(filename)
decode_image = tf.image.decode_jpeg(image_string, channels=3)
decode_image = tf.image.resize(decode_image, [FLAGS.img_size - 8, FLAGS.img_size - 8]) / 255.
#decode_image = tf.image.random_crop(decode_image, [FLAGS.img_size - 8, FLAGS.img_size - 8, 3])
if random() > 0.5:
decode_image = tf.image.flip_left_right(decode_image)
#decode_image = tf.image.per_image_standardization(decode_image)
label = label - 16
one_hot = tf.one_hot(label, FLAGS.num_classes)
return decode_image, one_hot, label
def val_func(name, label):
image_string = tf.io.read_file(name)
decode_image = tf.image.decode_jpeg(image_string, channels=3)
decode_image = tf.image.resize(decode_image, [FLAGS.img_size - 8, FLAGS.img_size - 8]) / 255.
#decode_image = tf.image.per_image_standardization(decode_image)
label = int(label) - 16
one_hot = tf.one_hot(label, FLAGS.num_classes)
return decode_image, one_hot
#@tf.function
def | (model, images):
logits, probs = model(images, training=True)
return logits, probs
@tf.function
def train_step(model, images, levels, imp):
with tf.GradientTape() as tape:
logits, probs = run_model(model, images)
#total_loss = (-tf.reduce_sum((tf.nn.log_softmax(logits, axis=2)[:,:,1]*levels + tf.nn.log_softmax(logits, axis=2)[:,:,0]*(1-levels))*imp, 1))
# total_loss = (-tf.reduce_sum( (tf.math.log_sigmoid(logits)*levels + tf.math.log(1. - tf.nn.sigmoid(logits))*(1-levels))*imp, 1))
total_loss = (-tf.reduce_sum( (tf.math.log_sigmoid(logits)*levels + (tf.math.log_sigmoid(logits) - logits)*(1-levels))*imp, 1))
#total_loss = -tf.reduce_sum((tf.math.log(tf.nn.softmax(logits, 2)[:, :, 1] + 1e-7) * levels \
# + tf.math.log(tf.nn.softmax(logits, 2)[:, :, 0] + 1e-7) * (1 - levels)) * imp, 1)
total_loss = tf.reduce_mean(total_loss)
gradients = tape.gradient(total_loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
return total_loss
def task_importance_weights(data):
label = np.array(data).astype(np.float32)
num_examples = label.size
y = np.unique(label)
m = np.zeros(label.shape)
for i, t in enumerate(np.arange(np.min(y), np.max(y))):
m_k = np.max([label[label > t].size,
num_examples - label[label > t].size])
#print(m_k)
m_k = tf.cast(tf.convert_to_tensor(m_k), tf.float32)
m[i] = tf.sqrt(m_k)
# m[i] = float(m_k)**(0.5)
max_ = np.max(m)
imp = tf.cast(m / max_, tf.float32)
#print(imp)
return imp
@tf.function
def test_MAE(model, images, labels):
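# Predicted rank = number of threshold probabilities above 0.5 (so predicted age = 16 + that count); ground truth is argmax of the one-hot label + 1, and absolute errors are summed over the batch.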
logits, probs = model(images, training=False)
predict = probs > 0.5
predict = tf.cast(predict, tf.float32)
pre_age = tf.reduce_sum(predict, 1)
grd_age = tf.argmax(labels, 1) + 1
grd_age = tf.cast(grd_age, tf.float32)
AE = tf.reduce_sum(tf.math.abs(grd_age - pre_age))
return AE
def make_levels(labels):
levels = []
for i in range(FLAGS.batch_size):
l = [1] * (labels[i].numpy()) + [0]*(FLAGS.num_classes - 1 - labels[i].numpy())
l = tf.cast(l, tf.float32)
levels.append(l)
return tf.convert_to_tensor(levels, tf.float32)
def main(argv=None):
# train_model = resnet_type1(input_shape=(FLAGS.img_size - 8, FLAGS.img_size - 8, 3), NUM_CLASSES=FLAGS.num_classes)
train_model = ResNet34(input_shape=(FLAGS.img_size - 8, FLAGS.img_size - 8, FLAGS.ch), include_top=False,
batch_size=FLAGS.batch_size, weight_path=FLAGS.weights, weights='imagenet')
regularizer = tf.keras.regularizers.l2(0.000005)
initializer = tf.keras.initializers.glorot_normal()
for layer in train_model.layers:
for attr in ['kernel_regularizer']:
if hasattr(layer, attr):
setattr(layer, attr, regularizer)
# for attr_ in ["kernel_initializer"]:
# if hasattr(layer, attr_):
# setattr(layer, attr_, initializer)
x = train_model.output
avgpool = tf.keras.layers.GlobalAveragePooling2D()(x)
# avgpool = tf.reshape(avgpool, [avgpool.shape[0], -1])
# fc = tf.keras.layers.Dense(1, use_bias=False)(avgpool)
# logits = Linear(NUM_CLASSES - 1)(fc)
logits = tf.keras.layers.Dense(FLAGS.num_classes-1, use_bias=False)(avgpool)
logits = Linear(FLAGS.num_classes - 1)(logits)
probs = tf.nn.sigmoid(logits)
train_model = tf.keras.Model(inputs=train_model.input, outputs=[logits, probs])
train_model.summary()
#for m in train_model.layers:
# if isinstance(m, tf.keras.layers.Conv2D):
# a = m.output_mask
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, (2. / n)**.5)
# elif isinstance(m, tf.keras.layers.BatchNormalization):
# m.get_weights
# m.weight.data.fill_(1)
# m.bias.data.zero_()
if FLAGS.pre_checkpoint is True:
ckpt = tf.train.Checkpoint(train_model=train_model, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(ckpt, FLAGS.pre_checkpoint_path, max_to_keep=5)
# if a checkpoint exists, restore the latest checkpoint.
if ckpt_manager.latest_checkpoint:
print(ckpt_manager.latest_checkpoint)
ckpt.restore(ckpt_manager.latest_checkpoint)
print ('Latest checkpoint restored!!')
if FLAGS.train == True:
data_name = np.loadtxt(FLAGS.txt_path, dtype='<U100', skiprows=0, usecols=0)
data_name = [FLAGS.img_path + data_name_ for data_name_ in data_name]
data_label = np.loadtxt(FLAGS.txt_path, dtype=np.int32, skiprows=0, usecols=1)
imp = task_importance_weights(data_label-16)
imp = imp[0:FLAGS.num_classes-1]
val_data_name = np.loadtxt(FLAGS.val_txt_path, dtype='<U100', skiprows=0, usecols=[0, 1, 2, 3])
print(len(val_data_name))
WM_img, WM_age = [], []
WF_img, WF_age = [], []
BM_img, BM_age = [], []
BF_img, BF_age = [], []
for i in range(len(val_data_name)):
if val_data_name[i][2] == "M" and val_data_name[i][3] == "W":
WM_img.append(FLAGS.val_img_path + val_data_name[i][0])
WM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "W":
WF_img.append(FLAGS.val_img_path + val_data_name[i][0])
WF_age.append(val_data_name[i][1])
if val_data_name[i][2] == "M" and val_data_name[i][3] == "B":
BM_img.append(FLAGS.val_img_path + val_data_name[i][0])
BM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "B":
BF_img.append(FLAGS.val_img_path + val_data_name[i][0])
BF_age.append(val_data_name[i][1])
print(len(WM_img), len(WF_img), len(BM_img), len(BF_img))
WM_img, WM_age = np.array(WM | run_model | identifier_name |
CORAL_train.py | ))*imp, 1))
#total_loss = -tf.reduce_sum((tf.math.log(tf.nn.softmax(logits, 2)[:, :, 1] + 1e-7) * levels \
# + tf.math.log(tf.nn.softmax(logits, 2)[:, :, 0] + 1e-7) * (1 - levels)) * imp, 1)
total_loss = tf.reduce_mean(total_loss)
gradients = tape.gradient(total_loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
return total_loss
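# Editor's note (not in the original source): the partially shown loss above appears to be the
# importance-weighted CORAL objective. For each of the K-1 ordinal binary tasks, a sigmoid
# cross-entropy term between `logits` and the binary `levels` is scaled by the per-task weight
# `imp`, summed over the task axis (axis 1), and then averaged over the batch, i.e. roughly
#   loss ~= mean_b( -sum_k( [levels_k * log sigmoid(logits_k)
#                            + (1 - levels_k) * log(1 - sigmoid(logits_k))] * imp_k ) )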
def task_importance_weights(data):
label = np.array(data).astype(np.float32)
num_examples = label.size
y = np.unique(label)
m = np.zeros(label.shape)
for i, t in enumerate(np.arange(np.min(y), np.max(y))):
m_k = np.max([label[label > t].size,
num_examples - label[label > t].size])
#print(m_k)
m_k = tf.cast(tf.convert_to_tensor(m_k), tf.float32)
m[i] = tf.sqrt(m_k)
# m[i] = float(m_k)**(0.5)
max_ = np.max(m)
imp = tf.cast(m / max_, tf.float32)
#print(imp)
return imp
@tf.function
def test_MAE(model, images, labels):
logits, probs = model(images, training=False)
predict = probs > 0.5
predict = tf.cast(predict, tf.float32)
pre_age = tf.reduce_sum(predict, 1)
grd_age = tf.argmax(labels, 1) + 1
grd_age = tf.cast(grd_age, tf.float32)
AE = tf.reduce_sum(tf.math.abs(grd_age - pre_age))
return AE
def make_levels(labels):
levels = []
for i in range(FLAGS.batch_size):
l = [1] * (labels[i].numpy()) + [0]*(FLAGS.num_classes - 1 - labels[i].numpy())
l = tf.cast(l, tf.float32)
levels.append(l)
return tf.convert_to_tensor(levels, tf.float32)
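# Editor's sketch (not in the original source): make_levels turns an integer label into the
# extended binary ("ordinal level") vector used by CORAL. For example, with
# FLAGS.num_classes = 5, a label of 3 becomes [1, 1, 1, 0]: the first `label` entries are 1
# and the remaining num_classes - 1 - label entries are 0.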
def main(argv=None):
# train_model = resnet_type1(input_shape=(FLAGS.img_size - 8, FLAGS.img_size - 8, 3), NUM_CLASSES=FLAGS.num_classes)
train_model = ResNet34(input_shape=(FLAGS.img_size - 8, FLAGS.img_size - 8, FLAGS.ch), include_top=False,
batch_size=FLAGS.batch_size, weight_path=FLAGS.weights, weights='imagenet')
regularizer = tf.keras.regularizers.l2(0.000005)
initializer = tf.keras.initializers.glorot_normal()
for layer in train_model.layers:
for attr in ['kernel_regularizer']:
if hasattr(layer, attr):
setattr(layer, attr, regularizer)
# for attr_ in ["kernel_initializer"]:
# if hasattr(layer, attr_):
# setattr(layer, attr_, initializer)
x = train_model.output
avgpool = tf.keras.layers.GlobalAveragePooling2D()(x)
# avgpool = tf.reshape(avgpool, [avgpool.shape[0], -1])
# fc = tf.keras.layers.Dense(1, use_bias=False)(avgpool)
# logits = Linear(NUM_CLASSES - 1)(fc)
logits = tf.keras.layers.Dense(FLAGS.num_classes-1, use_bias=False)(avgpool)
logits = Linear(FLAGS.num_classes - 1)(logits)
probs = tf.nn.sigmoid(logits)
train_model = tf.keras.Model(inputs=train_model.input, outputs=[logits, probs])
train_model.summary()
#for m in train_model.layers:
# if isinstance(m, tf.keras.layers.Conv2D):
# a = m.output_mask
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, (2. / n)**.5)
# elif isinstance(m, tf.keras.layers.BatchNormalization):
# m.get_weights
# m.weight.data.fill_(1)
# m.bias.data.zero_()
if FLAGS.pre_checkpoint is True:
ckpt = tf.train.Checkpoint(train_model=train_model, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(ckpt, FLAGS.pre_checkpoint_path, max_to_keep=5)
# if a checkpoint exists, restore the latest checkpoint.
if ckpt_manager.latest_checkpoint:
print(ckpt_manager.latest_checkpoint)
ckpt.restore(ckpt_manager.latest_checkpoint)
print('Latest checkpoint restored!!')
if FLAGS.train == True:
data_name = np.loadtxt(FLAGS.txt_path, dtype='<U100', skiprows=0, usecols=0)
data_name = [FLAGS.img_path + data_name_ for data_name_ in data_name]
data_label = np.loadtxt(FLAGS.txt_path, dtype=np.int32, skiprows=0, usecols=1)
imp = task_importance_weights(data_label-16)
imp = imp[0:FLAGS.num_classes-1]
val_data_name = np.loadtxt(FLAGS.val_txt_path, dtype='<U100', skiprows=0, usecols=[0, 1, 2, 3])
print(len(val_data_name))
WM_img, WM_age = [], []
WF_img, WF_age = [], []
BM_img, BM_age = [], []
BF_img, BF_age = [], []
for i in range(len(val_data_name)):
if val_data_name[i][2] == "M" and val_data_name[i][3] == "W":
WM_img.append(FLAGS.val_img_path + val_data_name[i][0])
WM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "W":
WF_img.append(FLAGS.val_img_path + val_data_name[i][0])
WF_age.append(val_data_name[i][1])
if val_data_name[i][2] == "M" and val_data_name[i][3] == "B":
BM_img.append(FLAGS.val_img_path + val_data_name[i][0])
BM_age.append(val_data_name[i][1])
if val_data_name[i][2] == "F" and val_data_name[i][3] == "B":
BF_img.append(FLAGS.val_img_path + val_data_name[i][0])
BF_age.append(val_data_name[i][1])
print(len(WM_img), len(WF_img), len(BM_img), len(BF_img))
WM_img, WM_age = np.array(WM_img), np.array(WM_age)
WF_img, WF_age = np.array(WF_img), np.array(WF_age)
BM_img, BM_age = np.array(BM_img), np.array(BM_age)
BF_img, BF_age = np.array(BF_img), np.array(BF_age)
all_val_list = [[WM_img, WM_age], [WF_img, WF_age], [BM_img, BM_age], [BF_img, BF_age]]
batch_idx = len(data_label) // FLAGS.batch_size
#current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
#train_log_dir = FLAGS.graphs + current_time + '/train'
#val_log_dir = FLAGS.graphs + current_time + '/val'
#train_summary_writer = tf.summary.create_file_writer(train_log_dir)
#val_summary_writer = tf.summary.create_file_writer(val_log_dir)
loss_f = open(FLAGS.output_loss_txt, "w")
count = 0
for epoch in range(FLAGS.epochs):
A = list(zip(data_name, data_label))
shuffle(A)
data_name, data_label = zip(*A)
data_name = np.array(data_name)
data_label = np.array(data_label)
data_generator = tf.data.Dataset.from_tensor_slices((data_name, data_label))
data_generator = data_generator.shuffle(len(data_name))
data_generator = data_generator.map(_func)
data_generator = data_generator.batch(FLAGS.batch_size)
data_generator = data_generator.prefetch(tf.data.experimental.AUTOTUNE)
it = iter(data_generator)
#imp = task_importance_weights(data_label)
#imp = imp[0:FLAGS.num_classes-1]
for step in range(batch_idx):
batch_images, batch_labels, age = next(it)
levels = make_levels(age)
total_loss = train_step(train_model, batch_images, levels, imp)
#with val_summary_writer.as_default():
# tf.summary.scalar(u'total loss', loss, step=count)
if count % 10 == 0:
#MAE = test_MAE(train_model, batch_images, batch_labels, levels)
print('Epoch: {} [{}/{}] loss = {}'.format(epoch + 1, step + 1, batch_idx, total_loss))
if count % 100 == 0:
test_list = ["WM", "WF", "BM", "BF"]
for j in range(len(all_val_list)):
val_img, val_lab = all_val_list[j]
val_data_generator = tf.data.Dataset.from_tensor_slices((val_img, val_lab))
val_data_generator = val_data_generator.map(val_func)
val_data_generator = val_data_generator.batch(1) | val_data_generator = val_data_generator.prefetch(tf.data.experimental.AUTOTUNE) | random_line_split |
|
sales-component.component.ts | SalePaymentMethod } from 'app/InventoryApp/Models/DTOs/SalePaymentMethod';
import { SaleUserBranchProductsDTO } from 'app/InventoryApp/Models/DTOs/SaleUserBranchProductsDTO';
import { DxStoreOptions } from 'app/InventoryApp/Models/DxStoreOptions';
import { LoginResponse } from 'app/InventoryApp/Models/LoginResponse';
import { UIResponse } from 'app/InventoryApp/Models/UIResponse';
import { PaymentScreenComponent } from 'app/InventoryApp/Selling/PaymentScreen/payment-screen.component';
import { DxStoreService } from 'app/InventoryApp/services/dx-store.service';
import { NormalSatisService } from 'app/InventoryApp/services/normal-satis.service';
import { SwalService } from 'app/InventoryApp/services/Swal.Service';
import { DxDataGridComponent, DxLookupComponent } from 'devextreme-angular';
import CustomStore from 'devextreme/data/custom_store';
import DataSource from 'devextreme/data/data_source';
import { Subscription } from 'rxjs';
import { tap } from 'rxjs/operators';
@Component({
selector: 'app-sales-component',
templateUrl: './sales-component.component.html',
styleUrls: ['./sales-component.component.scss'],
encapsulation: ViewEncapsulation.None,
animations: fuseAnimations
})
export class SalesComponentComponent implements OnInit {
RangeStartDate: Date;
RangeEndDate: Date;
Genders: any = [{ Value: 0, ViewValue: 'Erkek' }, { Value: 1, ViewValue: 'Kadın' }]
// Here I use an Angular Material table
ProductsToSellDisplayedColumns = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName', 'actions'];
ProductsToSellDataSource: ProductView[] = [];
ProductsToSellTableRows: ProductView[] = [];
@ViewChild("soledProductsGrid") soledProductsGrid: DxDataGridComponent;
@ViewChild("customerInfoLookup") customerInfoLookup: DxLookupComponent;
paymentDetailText: string;
// soledProductsDetailsText: DxDataGridComponent;
ProductsToSellTotalPrice: number;
// Here I am using DevExtreme
displayedColumnsSelledProducts = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName'];
SoledProductsDatasource: SaleUserBranchProductsDTO[];
soledProductsDS: DataSource;
// Holds subscriptions so they can be unsubscribed when leaving the component
private unsubscribe: Subscription[] = [];
ProductAndPriceFormGroup: FormGroup;
ProductsToSellTableId: number = 0;
ProductSellingDto: ProductSellingDto;
@ViewChild('PriceInput') PriceInput: ElementRef;
@ViewChild('productCode') productCode: ElementRef;
filterByToday: Array<any>;
today: Date = new Date();
userDetails = JSON.parse(localStorage.getItem('user')) as LoginResponse;
UserList = [{ Id: 1, Name: "Orhan" }, { Id: 1015, Name: "Uğur" }]
selectedUserId: number;
Operations: any = [{ Id: 0, Value: 'Satıldı' }, { Id: 1, Value: 'İade Edildi' }, { Id: 2, Value: 'Başka bir ürünle değiştirildi' }, { Id: 3, Value: 'Değiştirilen bir ürünün yerine bu alındı' }]
constructor(public _translate: TranslateService,
private normalSatisSerice: NormalSatisService,
private swal: SwalService,
public dialog: MatDialog,
private fb: FormBuilder,
private dxStore: DxStoreService) {
this.InitlizeSelledProductsDatasource();
this.InitilizeProductAndPriceForm();
}
ngOnInit(): void {
}
ngOnDestroy(): void {
this.unsubscribe.forEach(sb => sb.unsubscribe());
}
InitilizeProductAndPriceForm() {
this.ProductAndPriceFormGroup = this.fb.group({
ProductBarcode: ['', Validators.compose([
Validators.required
])
],
SellingPrice: ['', Validators.compose([
Validators.required,
])],
UserId: [1, Validators.compose([
Validators.required,
])],
});
}
public hasError = (controlName: string, errorName: string) => {
return this.ProductAndPriceFormGroup.controls[controlName].hasError(errorName);
}
InitlizeSelledProductsDatasource() {
let storeOptions: DxStoreOptions = {
loadUrl: "NormalSatis/GetSelledProductsByUserId", loadParams: { Id: this.userDetails.userId }, Key: "Id"
};
this.soledProductsDS = new DataSource({
store: this.dxStore.GetStore(storeOptions)
})
// this.normalSatisSerice.GetSoledProductsByUserID(1).toPromise().then((res: UIResponse<SaleUserBranchProductsDTO[]>) => {
// this.SoledProductsDatasource = res.Entity;
// this.ProductsToSellTableRows = [];
// });
}
productView: ProductView;
AddProduct() {
this.selectedUserId = this.ProductAndPriceFormGroup.controls.UserId.value;
this.productView.SellingPrice = this.ProductAndPriceFormGroup.controls.SellingPrice.value;
this.ProductsToSellTableRows.push(this.productView);
this.AssingDataToProductsToSellTable();
this.ProductAndPriceFormGroup.reset();
this.lowProductCount = false;
this.hasCampaign = false;
}
AssingDataToProductsToSellTable() {
this.ProductsToSellDataSource = this.ProductsToSellTableRows;
}
DeleteFromProductsToSellTable(row: ProductView) {
this.ProductsToSellTableRows = this.ProductsToSellTableRows.filter(fi => fi.TempId != row.TempId);
this.AssingDataToProductsToSellTable();
}
getGendere(gender | {
// 0 means Erkek, 1 means Kadin
return gender ? "Kadın" : "Erkek"
}
openSatisDialog() {
const dialogRef = this.dialog.open(PaymentScreenComponent, {
height: '600px',
width: '800px',
data: { Total: this.ProductsToSellTotalPrice = this.ProductsToSellDataSource.map(t => t.SellingPrice).reduce((acc, value) => +acc + +value, 0), CustomerInfoId: 0 }
});
this.unsubscribe.push(dialogRef.afterClosed().subscribe(async (result: PaymentPopup[]) => {
if (result?.length > 0) {
this.ProductsToSellDataSource = [];
let PaymentMethodIds: number[] = result.map(value => value.PaymentMethodId);
let ProductIds: number[] = this.ProductsToSellTableRows.map(value => value.Id);
let SellincPrices: number[] = this.ProductsToSellTableRows.map(value => value.SellingPrice);
let CampaignIds: number[] = this.ProductsToSellTableRows.map(value => value.CampaingId);
let salePaymentMethods: SalePaymentMethod[] = result.map(value => <SalePaymentMethod>{ Amount: value.Amount, DefferedPaymentCount: value.DefferedPaymentCount, PaymentMethodId: value.PaymentMethodId });
//UserId: this.userDetails.userId,
let ProductSellingDto: ProductSellingDto = { ProductIdsAndPricesAndCampaignIds: { SellingPrices: SellincPrices, ProductIds: ProductIds, CampaignIds: CampaignIds }, CustomerInfoId: result[0].CustomerInfo.Id, CustomerName: result[0].CustomerInfo.CustomerName, CustomerPhone: result[0].CustomerInfo.CustomerPhone, Receipt: result[0].Receipt, PaymentMethodIds: PaymentMethodIds, Total: this.ProductsToSellTotalPrice, UserId: this.selectedUserId, BranchId: this.ProductsToSellTableRows[0].BranchId, SalePaymentMethods: salePaymentMethods };
await this.normalSatisSerice.SellProducts(ProductSellingDto).pipe(tap(t => this.swal.showSuccessMessage())).toPromise();
this.ProductsToSellTableRows = [];
this.soledProductsGrid.instance.refresh();
}
}));
}
isProductExist = false;
lowProductCount = false;
hasCampaign = false;
isProductCountEnough = false;
async productCodeFocusOut() {
this.PriceInput.nativeElement.focus();
const productCode = this.ProductAndPriceFormGroup.controls.ProductBarcode.value;
if (productCode && productCode.length == 12) {
let res: UIResponse<ProductView> = await this.normalSatisSerice.GetProductDetails(productCode).toPromise();
if (!res.IsError) {
this.isProductExist = true;
this.productView = res.Entity;
this.ProductAndPriceFormGroup.controls.SellingPrice.setValue(this.productView.SellingPrice);
this.productView.TempId = this.ProductsToSellTableId++;
let ProductCount = this.ProductsToSellDataSource.filter(fi => fi.Id == this.productView.Id).length;
this.productView.Count -= ProductCount;
if (this.productView.Count <= 10) {
this.lowProductCount = true;
} else {
this.lowProductCount = false;
}
if (this.productView.Count == 0) {
this.isProductCountEnough = false;
} else {
this.isProductCountEnough = true;
}
if (this.productView.CampaingId != 0) | : number) | identifier_name |
sales-component.component.ts | SalePaymentMethod } from 'app/InventoryApp/Models/DTOs/SalePaymentMethod';
import { SaleUserBranchProductsDTO } from 'app/InventoryApp/Models/DTOs/SaleUserBranchProductsDTO';
import { DxStoreOptions } from 'app/InventoryApp/Models/DxStoreOptions';
import { LoginResponse } from 'app/InventoryApp/Models/LoginResponse';
import { UIResponse } from 'app/InventoryApp/Models/UIResponse';
import { PaymentScreenComponent } from 'app/InventoryApp/Selling/PaymentScreen/payment-screen.component';
import { DxStoreService } from 'app/InventoryApp/services/dx-store.service';
import { NormalSatisService } from 'app/InventoryApp/services/normal-satis.service';
import { SwalService } from 'app/InventoryApp/services/Swal.Service';
import { DxDataGridComponent, DxLookupComponent } from 'devextreme-angular';
import CustomStore from 'devextreme/data/custom_store';
import DataSource from 'devextreme/data/data_source';
import { Subscription } from 'rxjs';
import { tap } from 'rxjs/operators';
@Component({
selector: 'app-sales-component',
templateUrl: './sales-component.component.html',
styleUrls: ['./sales-component.component.scss'],
encapsulation: ViewEncapsulation.None,
animations: fuseAnimations
})
export class SalesComponentComponent implements OnInit {
RangeStartDate: Date;
RangeEndDate: Date;
Genders: any = [{ Value: 0, ViewValue: 'Erkek' }, { Value: 1, ViewValue: 'Kadın' }]
// Here I use an Angular Material table
ProductsToSellDisplayedColumns = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName', 'actions'];
ProductsToSellDataSource: ProductView[] = [];
ProductsToSellTableRows: ProductView[] = [];
@ViewChild("soledProductsGrid") soledProductsGrid: DxDataGridComponent;
@ViewChild("customerInfoLookup") customerInfoLookup: DxLookupComponent;
paymentDetailText: string;
// soledProductsDetailsText: DxDataGridComponent;
ProductsToSellTotalPrice: number;
// Here I am using DevExtreme
displayedColumnsSelledProducts = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName'];
SoledProductsDatasource: SaleUserBranchProductsDTO[];
soledProductsDS: DataSource;
// Holds subscriptions so they can be unsubscribed when leaving the component
private unsubscribe: Subscription[] = [];
ProductAndPriceFormGroup: FormGroup;
ProductsToSellTableId: number = 0;
ProductSellingDto: ProductSellingDto;
@ViewChild('PriceInput') PriceInput: ElementRef;
@ViewChild('productCode') productCode: ElementRef;
filterByToday: Array<any>;
today: Date = new Date();
userDetails = JSON.parse(localStorage.getItem('user')) as LoginResponse;
UserList = [{ Id: 1, Name: "Orhan" }, { Id: 1015, Name: "Uğur" }]
selectedUserId: number;
Operations: any = [{ Id: 0, Value: 'Satıldı' }, { Id: 1, Value: 'İade Edildi' }, { Id: 2, Value: 'Başka bir ürünle değiştirildi' }, { Id: 3, Value: 'Değiştirilen bir ürünün yerine bu alındı' }]
constructor(public _translate: TranslateService,
private normalSatisSerice: NormalSatisService,
private swal: SwalService,
public dialog: MatDialog,
private fb: FormBuilder,
private dxStore: DxStoreService) {
this.InitlizeSelledProductsDatasource();
this.InitilizeProductAndPriceForm();
}
ngOnInit(): void {
}
ngOnDest | : void {
this.unsubscribe.forEach(sb => sb.unsubscribe());
}
InitilizeProductAndPriceForm() {
this.ProductAndPriceFormGroup = this.fb.group({
ProductBarcode: ['', Validators.compose([
Validators.required
])
],
SellingPrice: ['', Validators.compose([
Validators.required,
])],
UserId: [1, Validators.compose([
Validators.required,
])],
});
}
public hasError = (controlName: string, errorName: string) => {
return this.ProductAndPriceFormGroup.controls[controlName].hasError(errorName);
}
InitlizeSelledProductsDatasource() {
let storeOptions: DxStoreOptions = {
loadUrl: "NormalSatis/GetSelledProductsByUserId", loadParams: { Id: this.userDetails.userId }, Key: "Id"
};
this.soledProductsDS = new DataSource({
store: this.dxStore.GetStore(storeOptions)
})
// this.normalSatisSerice.GetSoledProductsByUserID(1).toPromise().then((res: UIResponse<SaleUserBranchProductsDTO[]>) => {
// this.SoledProductsDatasource = res.Entity;
// this.ProductsToSellTableRows = [];
// });
}
productView: ProductView;
AddProduct() {
this.selectedUserId = this.ProductAndPriceFormGroup.controls.UserId.value;
this.productView.SellingPrice = this.ProductAndPriceFormGroup.controls.SellingPrice.value;
this.ProductsToSellTableRows.push(this.productView);
this.AssingDataToProductsToSellTable();
this.ProductAndPriceFormGroup.reset();
this.lowProductCount = false;
this.hasCampaign = false;
}
AssingDataToProductsToSellTable() {
this.ProductsToSellDataSource = this.ProductsToSellTableRows;
}
DeleteFromProductsToSellTable(row: ProductView) {
this.ProductsToSellTableRows = this.ProductsToSellTableRows.filter(fi => fi.TempId != row.TempId);
this.AssingDataToProductsToSellTable();
}
getGendere(gender: number) {
// 0 means Erkek, 1 means Kadin
return gender ? "Kadın" : "Erkek"
}
openSatisDialog() {
const dialogRef = this.dialog.open(PaymentScreenComponent, {
height: '600px',
width: '800px',
data: { Total: this.ProductsToSellTotalPrice = this.ProductsToSellDataSource.map(t => t.SellingPrice).reduce((acc, value) => +acc + +value, 0), CustomerInfoId: 0 }
});
this.unsubscribe.push(dialogRef.afterClosed().subscribe(async (result: PaymentPopup[]) => {
if (result?.length > 0) {
this.ProductsToSellDataSource = [];
let PaymentMethodIds: number[] = result.map(value => value.PaymentMethodId);
let ProductIds: number[] = this.ProductsToSellTableRows.map(value => value.Id);
let SellincPrices: number[] = this.ProductsToSellTableRows.map(value => value.SellingPrice);
let CampaignIds: number[] = this.ProductsToSellTableRows.map(value => value.CampaingId);
let salePaymentMethods: SalePaymentMethod[] = result.map(value => <SalePaymentMethod>{ Amount: value.Amount, DefferedPaymentCount: value.DefferedPaymentCount, PaymentMethodId: value.PaymentMethodId });
//UserId: this.userDetails.userId,
let ProductSellingDto: ProductSellingDto = { ProductIdsAndPricesAndCampaignIds: { SellingPrices: SellincPrices, ProductIds: ProductIds, CampaignIds: CampaignIds }, CustomerInfoId: result[0].CustomerInfo.Id, CustomerName: result[0].CustomerInfo.CustomerName, CustomerPhone: result[0].CustomerInfo.CustomerPhone, Receipt: result[0].Receipt, PaymentMethodIds: PaymentMethodIds, Total: this.ProductsToSellTotalPrice, UserId: this.selectedUserId, BranchId: this.ProductsToSellTableRows[0].BranchId, SalePaymentMethods: salePaymentMethods };
await this.normalSatisSerice.SellProducts(ProductSellingDto).pipe(tap(t => this.swal.showSuccessMessage())).toPromise();
this.ProductsToSellTableRows = [];
this.soledProductsGrid.instance.refresh();
}
}));
}
isProductExist = false;
lowProductCount = false;
hasCampaign = false;
isProductCountEnough = false;
async productCodeFocusOut() {
this.PriceInput.nativeElement.focus();
const productCode = this.ProductAndPriceFormGroup.controls.ProductBarcode.value;
if (productCode && productCode.length == 12) {
let res: UIResponse<ProductView> = await this.normalSatisSerice.GetProductDetails(productCode).toPromise();
if (!res.IsError) {
this.isProductExist = true;
this.productView = res.Entity;
this.ProductAndPriceFormGroup.controls.SellingPrice.setValue(this.productView.SellingPrice);
this.productView.TempId = this.ProductsToSellTableId++;
let ProductCount = this.ProductsToSellDataSource.filter(fi => fi.Id == this.productView.Id).length;
this.productView.Count -= ProductCount;
if (this.productView.Count <= 10) {
this.lowProductCount = true;
} else {
this.lowProductCount = false;
}
if (this.productView.Count == 0) {
this.isProductCountEnough = false;
} else {
this.isProductCountEnough = true;
}
if (this.productView.CampaingId != | roy() | identifier_body |
sales-component.component.ts | DTO } from 'app/InventoryApp/Models/DTOs/SaleUserBranchProductsDTO';
import { DxStoreOptions } from 'app/InventoryApp/Models/DxStoreOptions';
import { LoginResponse } from 'app/InventoryApp/Models/LoginResponse';
import { UIResponse } from 'app/InventoryApp/Models/UIResponse';
import { PaymentScreenComponent } from 'app/InventoryApp/Selling/PaymentScreen/payment-screen.component';
import { DxStoreService } from 'app/InventoryApp/services/dx-store.service';
import { NormalSatisService } from 'app/InventoryApp/services/normal-satis.service';
import { SwalService } from 'app/InventoryApp/services/Swal.Service';
import { DxDataGridComponent, DxLookupComponent } from 'devextreme-angular';
import CustomStore from 'devextreme/data/custom_store';
import DataSource from 'devextreme/data/data_source';
import { Subscription } from 'rxjs';
import { tap } from 'rxjs/operators';
@Component({
selector: 'app-sales-component',
templateUrl: './sales-component.component.html',
styleUrls: ['./sales-component.component.scss'],
encapsulation: ViewEncapsulation.None,
animations: fuseAnimations
})
export class SalesComponentComponent implements OnInit {
RangeStartDate: Date;
RangeEndDate: Date;
Genders: any = [{ Value: 0, ViewValue: 'Erkek' }, { Value: 1, ViewValue: 'Kadın' }]
// Here I used Material Table
ProductsToSellDisplayedColumns = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName', 'actions'];
ProductsToSellDataSource: ProductView[] = [];
ProductsToSellTableRows: ProductView[] = [];
@ViewChild("soledProductsGrid") soledProductsGrid: DxDataGridComponent;
@ViewChild("customerInfoLookup") customerInfoLookup: DxLookupComponent;
paymentDetailText: string;
// soledProductsDetailsText: DxDataGridComponent;
ProductsToSellTotalPrice: number;
// Here I am using DevExtreme
displayedColumnsSelledProducts = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName'];
SoledProductsDatasource: SaleUserBranchProductsDTO[];
soledProductsDS: DataSource;
// Holds subscriptions so they can be unsubscribed when leaving the component
private unsubscribe: Subscription[] = [];
ProductAndPriceFormGroup: FormGroup;
ProductsToSellTableId: number = 0;
ProductSellingDto: ProductSellingDto;
@ViewChild('PriceInput') PriceInput: ElementRef;
@ViewChild('productCode') productCode: ElementRef;
filterByToday: Array<any>;
today: Date = new Date();
userDetails = JSON.parse(localStorage.getItem('user')) as LoginResponse;
UserList = [{ Id: 1, Name: "Orhan" }, { Id: 1015, Name: "Uğur" }]
selectedUserId: number;
Operations: any = [{ Id: 0, Value: 'Satıldı' }, { Id: 1, Value: 'İade Edildi' }, { Id: 2, Value: 'Başka bir ürünle değiştirildi' }, { Id: 3, Value: 'Değiştirilen bir ürünün yerine bu alındı' }]
constructor(public _translate: TranslateService,
private normalSatisSerice: NormalSatisService,
private swal: SwalService,
public dialog: MatDialog,
private fb: FormBuilder,
private dxStore: DxStoreService) {
this.InitlizeSelledProductsDatasource();
this.InitilizeProductAndPriceForm();
}
ngOnInit(): void {
}
ngOnDestroy(): void {
this.unsubscribe.forEach(sb => sb.unsubscribe());
}
InitilizeProductAndPriceForm() {
this.ProductAndPriceFormGroup = this.fb.group({
ProductBarcode: ['', Validators.compose([
Validators.required
])
],
SellingPrice: ['', Validators.compose([
Validators.required,
])],
UserId: [1, Validators.compose([
Validators.required,
])],
});
}
public hasError = (controlName: string, errorName: string) => {
return this.ProductAndPriceFormGroup.controls[controlName].hasError(errorName);
}
InitlizeSelledProductsDatasource() {
let storeOptions: DxStoreOptions = {
loadUrl: "NormalSatis/GetSelledProductsByUserId", loadParams: { Id: this.userDetails.userId }, Key: "Id"
};
this.soledProductsDS = new DataSource({
store: this.dxStore.GetStore(storeOptions)
})
// this.normalSatisSerice.GetSoledProductsByUserID(1).toPromise().then((res: UIResponse<SaleUserBranchProductsDTO[]>) => {
// this.SoledProductsDatasource = res.Entity;
// this.ProductsToSellTableRows = [];
// });
}
productView: ProductView;
AddProduct() {
this.selectedUserId = this.ProductAndPriceFormGroup.controls.UserId.value;
this.productView.SellingPrice = this.ProductAndPriceFormGroup.controls.SellingPrice.value;
this.ProductsToSellTableRows.push(this.productView);
this.AssingDataToProductsToSellTable();
this.ProductAndPriceFormGroup.reset();
this.lowProductCount = false;
this.hasCampaign = false;
}
AssingDataToProductsToSellTable() {
this.ProductsToSellDataSource = this.ProductsToSellTableRows;
}
DeleteFromProductsToSellTable(row: ProductView) {
this.ProductsToSellTableRows = this.ProductsToSellTableRows.filter(fi => fi.TempId != row.TempId);
this.AssingDataToProductsToSellTable();
}
getGendere(gender: number) {
// 0 means Erkek, 1 means Kadin
return gender ? "Kadın" : "Erkek"
}
openSatisDialog() {
const dialogRef = this.dialog.open(PaymentScreenComponent, {
height: '600px',
width: '800px',
data: { Total: this.ProductsToSellTotalPrice = this.ProductsToSellDataSource.map(t => t.SellingPrice).reduce((acc, value) => +acc + +value, 0), CustomerInfoId: 0 }
});
this.unsubscribe.push(dialogRef.afterClosed().subscribe(async (result: PaymentPopup[]) => {
if (result?.length > 0) {
this.ProductsToSellDataSource = [];
let PaymentMethodIds: number[] = result.map(value => value.PaymentMethodId);
let ProductIds: number[] = this.ProductsToSellTableRows.map(value => value.Id);
let SellincPrices: number[] = this.ProductsToSellTableRows.map(value => value.SellingPrice);
let CampaignIds: number[] = this.ProductsToSellTableRows.map(value => value.CampaingId);
let salePaymentMethods: SalePaymentMethod[] = result.map(value => <SalePaymentMethod>{ Amount: value.Amount, DefferedPaymentCount: value.DefferedPaymentCount, PaymentMethodId: value.PaymentMethodId });
//UserId: this.userDetails.userId,
let ProductSellingDto: ProductSellingDto = { ProductIdsAndPricesAndCampaignIds: { SellingPrices: SellincPrices, ProductIds: ProductIds, CampaignIds: CampaignIds }, CustomerInfoId: result[0].CustomerInfo.Id, CustomerName: result[0].CustomerInfo.CustomerName, CustomerPhone: result[0].CustomerInfo.CustomerPhone, Receipt: result[0].Receipt, PaymentMethodIds: PaymentMethodIds, Total: this.ProductsToSellTotalPrice, UserId: this.selectedUserId, BranchId: this.ProductsToSellTableRows[0].BranchId, SalePaymentMethods: salePaymentMethods };
await this.normalSatisSerice.SellProducts(ProductSellingDto).pipe(tap(t => this.swal.showSuccessMessage())).toPromise();
this.ProductsToSellTableRows = [];
this.soledProductsGrid.instance.refresh();
}
}));
}
isProductExist = false;
lowProductCount = false;
hasCampaign = false;
isProductCountEnough = false;
async productCodeFocusOut() {
this.PriceInput.nativeElement.focus();
const productCode = this.ProductAndPriceFormGroup.controls.ProductBarcode.value;
if (productCode && productCode.length == 12) {
let res: UIResponse<ProductView> = await this.normalSatisSerice.GetProductDetails(productCode).toPromise();
if (!res.IsError) {
this.isProductExist = true;
this.productView = res.Entity;
this.ProductAndPriceFormGroup.controls.SellingPrice.setValue(this.productView.SellingPrice);
this.productView.TempId = this.ProductsToSellTableId++;
let ProductCount = this.ProductsToSellDataSource.filter(fi => fi.Id == this.productView.Id).length;
this.productView.Count -= ProductCount;
if (this.productView.Count <= 10) {
this.lowProductCount = true;
} else {
this.lowProductCount = false;
}
if (this.productView.Count == 0) {
this.isProductCountEnough = false;
} else {
this.isProductCountEnough = true;
}
if (this.productView.CampaingId != 0) {
this.hasCampaign = true;
} else {
this.h | asCampaign = false;
}
} else {
| conditional_block |
|
sales-component.component.ts | SalePaymentMethod } from 'app/InventoryApp/Models/DTOs/SalePaymentMethod';
import { SaleUserBranchProductsDTO } from 'app/InventoryApp/Models/DTOs/SaleUserBranchProductsDTO';
import { DxStoreOptions } from 'app/InventoryApp/Models/DxStoreOptions';
import { LoginResponse } from 'app/InventoryApp/Models/LoginResponse';
import { UIResponse } from 'app/InventoryApp/Models/UIResponse';
import { PaymentScreenComponent } from 'app/InventoryApp/Selling/PaymentScreen/payment-screen.component';
import { DxStoreService } from 'app/InventoryApp/services/dx-store.service';
import { NormalSatisService } from 'app/InventoryApp/services/normal-satis.service';
import { SwalService } from 'app/InventoryApp/services/Swal.Service';
import { DxDataGridComponent, DxLookupComponent } from 'devextreme-angular';
import CustomStore from 'devextreme/data/custom_store';
import DataSource from 'devextreme/data/data_source';
import { Subscription } from 'rxjs';
import { tap } from 'rxjs/operators';
@Component({
selector: 'app-sales-component',
templateUrl: './sales-component.component.html',
styleUrls: ['./sales-component.component.scss'],
encapsulation: ViewEncapsulation.None,
animations: fuseAnimations
})
export class SalesComponentComponent implements OnInit {
RangeStartDate: Date;
RangeEndDate: Date;
Genders: any = [{ Value: 0, ViewValue: 'Erkek' }, { Value: 1, ViewValue: 'Kadın' }]
// Here I used Material Table
ProductsToSellDisplayedColumns = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName', 'actions'];
ProductsToSellDataSource: ProductView[] = [];
ProductsToSellTableRows: ProductView[] = [];
@ViewChild("soledProductsGrid") soledProductsGrid: DxDataGridComponent;
@ViewChild("customerInfoLookup") customerInfoLookup: DxLookupComponent;
paymentDetailText: string;
// soledProductsDetailsText: DxDataGridComponent;
ProductsToSellTotalPrice: number;
// Here I am using DevExtreme
displayedColumnsSelledProducts = ['ProductName', 'ProductBarcode', 'ProductCode', 'ColorName', 'Gender', 'ProductYear', 'SellingPrice', 'Size', 'BranchName'];
SoledProductsDatasource: SaleUserBranchProductsDTO[];
soledProductsDS: DataSource;
// I am using this to unsubscribe after leaving component
private unsubscribe: Subscription[] = [];
ProductAndPriceFormGroup: FormGroup;
ProductsToSellTableId: number = 0;
ProductSellingDto: ProductSellingDto;
@ViewChild('PriceInput') PriceInput: ElementRef;
@ViewChild('productCode') productCode: ElementRef;
filterByToday: Array<any>;
today: Date = new Date();
userDetails = JSON.parse(localStorage.getItem('user')) as LoginResponse;
UserList = [{ Id: 1, Name: "Orhan" }, { Id: 1015, Name: "Uğur" }]
selectedUserId: number;
Operations: any = [{ Id: 0, Value: 'Satıldı' }, { Id: 1, Value: 'İade Edildi' }, { Id: 2, Value: 'Başka bir ürünle değiştirild' }, , { Id: 3, Value: 'Değiştirlen bir ürünün yerine bu alındı' }]
constructor(public _translate: TranslateService,
private normalSatisSerice: NormalSatisService,
private swal: SwalService,
public dialog: MatDialog,
private fb: FormBuilder,
private dxStore: DxStoreService) {
this.InitlizeSelledProductsDatasource();
this.InitilizeProductAndPriceForm();
}
ngOnInit(): void {
}
ngOnDestroy(): void {
this.unsubscribe.forEach(sb => sb.unsubscribe());
}
InitilizeProductAndPriceForm() {
this.ProductAndPriceFormGroup = this.fb.group({
ProductBarcode: ['', Validators.compose([
Validators.required
])
],
SellingPrice: ['', Validators.compose([
Validators.required,
])],
UserId: [1, Validators.compose([
Validators.required,
])],
});
}
public hasError = (controlName: string, errorName: string) => {
return this.ProductAndPriceFormGroup.controls[controlName].hasError(errorName);
}
InitlizeSelledProductsDatasource() {
let storeOptions: DxStoreOptions = {
loadUrl: "NormalSatis/GetSelledProductsByUserId", loadParams: { Id: this.userDetails.userId }, Key: "Id"
};
this.soledProductsDS = new DataSource({
store: this.dxStore.GetStore(storeOptions)
})
// this.normalSatisSerice.GetSoledProductsByUserID(1).toPromise().then((res: UIResponse<SaleUserBranchProductsDTO[]>) => {
// this.SoledProductsDatasource = res.Entity;
// this.ProductsToSellTableRows = [];
// });
}
productView: ProductView;
AddProduct() {
this.selectedUserId = this.ProductAndPriceFormGroup.controls.UserId.value;
this.productView.SellingPrice = this.ProductAndPriceFormGroup.controls.SellingPrice.value;
this.ProductsToSellTableRows.push(this.productView);
this.AssingDataToProductsToSellTable();
this.ProductAndPriceFormGroup.reset();
this.lowProductCount = false;
this.hasCampaign = false;
}
AssingDataToProductsToSellTable() {
this.ProductsToSellDataSource = this.ProductsToSellTableRows;
}
DeleteFromProductsToSellTable(row: ProductView) {
this.ProductsToSellTableRows = this.ProductsToSellTableRows.filter(fi => fi.TempId != row.TempId);
this.AssingDataToProductsToSellTable();
}
getGendere(gender: number) {
// 0 means Erkek, 1 means Kadin
return gender ? "Kadın" : "Erkek"
}
openSatisDialog() {
const dialogRef = this.dialog.open(PaymentScreenComponent, {
height: '600px',
width: '800px',
data: { Total: this.ProductsToSellTotalPrice = this.ProductsToSellDataSource.map(t => t.SellingPrice).reduce((acc, value) => +acc + +value, 0), CustomerInfoId: 0 }
});
this.unsubscribe.push(dialogRef.afterClosed().subscribe(async (result: PaymentPopup[]) => {
if (result?.length > 0) {
this.ProductsToSellDataSource = [];
let PaymentMethodIds: number[] = result.map(value => value.PaymentMethodId);
let ProductIds: number[] = this.ProductsToSellTableRows.map(value => value.Id);
let SellincPrices: number[] = this.ProductsToSellTableRows.map(value => value.SellingPrice);
let CampaignIds: number[] = this.ProductsToSellTableRows.map(value => value.CampaingId);
let salePaymentMethods: SalePaymentMethod[] = result.map(value => <SalePaymentMethod>{ Amount: value.Amount, DefferedPaymentCount: value.DefferedPaymentCount, PaymentMethodId: value.PaymentMethodId });
//UserId: this.userDetails.userId,
let ProductSellingDto: ProductSellingDto = { ProductIdsAndPricesAndCampaignIds: { SellingPrices: SellincPrices, ProductIds: ProductIds, CampaignIds: CampaignIds }, CustomerInfoId: result[0].CustomerInfo.Id, CustomerName: result[0].CustomerInfo.CustomerName, CustomerPhone: result[0].CustomerInfo.CustomerPhone, Receipt: result[0].Receipt, PaymentMethodIds: PaymentMethodIds, Total: this.ProductsToSellTotalPrice, UserId: this.selectedUserId, BranchId: this.ProductsToSellTableRows[0].BranchId, SalePaymentMethods: salePaymentMethods };
await this.normalSatisSerice.SellProducts(ProductSellingDto).pipe(tap(t => this.swal.showSuccessMessage())).toPromise();
this.ProductsToSellTableRows = [];
this.soledProductsGrid.instance.refresh();
}
}));
} | hasCampaign = false;
isProductCountEnough = false;
async productCodeFocusOut() {
this.PriceInput.nativeElement.focus();
const productCode = this.ProductAndPriceFormGroup.controls.ProductBarcode.value;
if (productCode && productCode.length == 12) {
let res: UIResponse<ProductView> = await this.normalSatisSerice.GetProductDetails(productCode).toPromise();
if (!res.IsError) {
this.isProductExist = true;
this.productView = res.Entity;
this.ProductAndPriceFormGroup.controls.SellingPrice.setValue(this.productView.SellingPrice);
this.productView.TempId = this.ProductsToSellTableId++;
let ProductCount = this.ProductsToSellDataSource.filter(fi => fi.Id == this.productView.Id).length;
this.productView.Count -= ProductCount;
if (this.productView.Count <= 10) {
this.lowProductCount = true;
} else {
this.lowProductCount = false;
}
if (this.productView.Count == 0) {
this.isProductCountEnough = false;
} else {
this.isProductCountEnough = true;
}
if (this.productView.CampaingId != 0) |
isProductExist = false;
lowProductCount = false; | random_line_split |
doc_zh_CN.go | ErrMessageTooLong is returned when attempting to encrypt a message which is too
// large for the size of the public key.
var ErrMessageTooLong = errors.New("crypto/rsa: message too long for RSA public key size")
// ErrVerification represents a failure to verify a signature. It is deliberately
// vague to avoid adaptive attacks.
var ErrVerification = errors.New("crypto/rsa: verification error")
// DecryptOAEP decrypts ciphertext using RSA-OAEP. If random != nil, DecryptOAEP
// uses RSA blinding to avoid timing side-channel attacks.
// DecryptOAEP解密RSA-OAEP算法加密的数据。如果random不是nil,函数会注意规避时间侧信道攻击。
func DecryptOAEP(hash hash.Hash, random io.Reader, priv *PrivateKey, ciphertext []byte, label []byte) (msg []byte, err error)
// DecryptPKCS1v15 decrypts a plaintext using RSA and the padding scheme from
// PKCS#1 v1.5. If rand != nil, it uses RSA blinding to avoid timing side-channel | func DecryptPKCS1v15(rand io.Reader, priv *PrivateKey, ciphertext []byte) (out []byte, err error)
// DecryptPKCS1v15SessionKey decrypts a session key using RSA and the padding
// scheme from PKCS#1 v1.5. If rand != nil, it uses RSA blinding to avoid timing
// side-channel attacks. It returns an error if the ciphertext is the wrong length
// or if the ciphertext is greater than the public modulus. Otherwise, no error is
// returned. If the padding is valid, the resulting plaintext message is copied
// into key. Otherwise, key is unchanged. These alternatives occur in constant
// time. It is intended that the user of this function generate a random session
// key beforehand and continue the protocol with the resulting value. This will
// remove any possibility that an attacker can learn any information about the
// plaintext. See ``Chosen Ciphertext Attacks Against Protocols Based on the RSA
// Encryption Standard PKCS #1'', Daniel Bleichenbacher, Advances in Cryptology
// (Crypto '98).
// DecryptPKCS1v15SessionKey使用PKCS#1
// v1.5规定的填充方案和RSA算法解密会话密钥。如果random不是nil,函数会注意规避时间侧信道攻击。
//
// 如果密文长度不对,或者如果密文比公共模数的长度还长,会返回错误;否则,不会返回任何错误。如果填充是合法的,生成的明文信息会拷贝进key;否则,key不会被修改。这些情况都会在固定时间内出现(规避时间侧信道攻击)。本函数的目的是让程序的使用者事先生成一个随机的会话密钥,并用运行时的值继续协议。这样可以避免任何攻击者从明文窃取信息的可能性。
//
// 参见”Chosen Ciphertext Attacks Against Protocols Based on the RSA Encryption
// Standard PKCS #1”。
func DecryptPKCS1v15SessionKey(rand io.Reader, priv *PrivateKey, ciphertext []byte, key []byte) (err error)
// EncryptOAEP encrypts the given message with RSA-OAEP. The message must be no
// longer than the length of the public modulus less twice the hash length plus 2.
// 采用RSA-OAEP算法加密给出的数据。数据不能超过((公共模数的长度)-2*( hash长度)+2)字节。
func EncryptOAEP(hash hash.Hash, random io.Reader, pub *PublicKey, msg []byte, label []byte) (out []byte, err error)
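// A minimal usage sketch added for illustration (not part of the original documentation);
// it assumes an *rsa.PrivateKey `priv` obtained elsewhere (e.g. via rsa.GenerateKey) and
// uses SHA-256 as the OAEP hash:
//
//	secret := []byte("session key")
//	ct, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, &priv.PublicKey, secret, nil)
//	if err != nil {
//		// handle error
//	}
//	pt, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, priv, ct, nil)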
// EncryptPKCS1v15 encrypts the given message with RSA and the padding scheme from
// PKCS#1 v1.5. The message must be no longer than the length of the public modulus
// minus 11 bytes. WARNING: use of this function to encrypt plaintexts other than
// session keys is dangerous. Use RSA OAEP in new protocols.
// EncryptPKCS1v15使用PKCS#1
// v1.5规定的填充方案和RSA算法加密msg。信息不能超过((公共模数的长度)-11)字节。注意:使用本函数加密明文(而不是会话密钥)是危险的,请尽量在新协议中使用RSA
// OAEP。
func EncryptPKCS1v15(rand io.Reader, pub *PublicKey, msg []byte) (out []byte, err error)
// SignPKCS1v15 calculates the signature of hashed using RSASSA-PKCS1-V1_5-SIGN
// from RSA PKCS#1 v1.5. Note that hashed must be the result of hashing the input
// message using the given hash function. If hash is zero, hashed is signed
// directly. This isn't advisable except for interoperability.
// SignPKCS1v15使用RSA PKCS#1
// v1.5规定的RSASSA-PKCS1-V1_5-SIGN签名方案计算签名。注意hashed必须是使用提供给本函数的hash参数对(要签名的)原始数据进行hash的结果。
func SignPKCS1v15(rand io.Reader, priv *PrivateKey, hash crypto.Hash, hashed []byte) (s []byte, err error)
// SignPSS calculates the signature of hashed using RSASSA-PSS [1]. Note that
// hashed must be the result of hashing the input message using the given hash
// function. The opts argument may be nil, in which case sensible defaults are
// used.
// SignPSS采用RSASSA-PSS方案计算签名。注意hashed必须是使用提供给本函数的hash参数对(要签名的)原始数据进行hash的结果。opts参数可以为nil,此时会使用默认参数。
func SignPSS(rand io.Reader, priv *PrivateKey, hash crypto.Hash, hashed []byte, opts *PSSOptions) (s []byte, err error)
// VerifyPKCS1v15 verifies an RSA PKCS#1 v1.5 signature. hashed is the result of
// hashing the input message using the given hash function and sig is the
// signature. A valid signature is indicated by returning a nil error. If hash is
// zero then hashed is used directly. This isn't advisable except for
// interoperability.
// VerifyPKCS1v15认证RSA PKCS#1
// v1.5签名。hashed是使用提供的hash参数对(要签名的)原始数据进行hash的结果。合法的签名会返回nil,否则表示签名不合法。
func VerifyPKCS1v15(pub *PublicKey, hash crypto.Hash, hashed []byte, sig []byte) (err error)
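// A minimal sign/verify sketch added for illustration (not part of the original
// documentation); `priv` is an assumed *rsa.PrivateKey and the message is hashed with
// SHA-256 before signing, as the docs above require:
//
//	hashed := sha256.Sum256(msg)
//	sig, err := rsa.SignPKCS1v15(rand.Reader, priv, crypto.SHA256, hashed[:])
//	if err != nil {
//		// handle error
//	}
//	err = rsa.VerifyPKCS1v15(&priv.PublicKey, crypto.SHA256, hashed[:], sig) // nil means the signature is valid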
// VerifyPSS verifies a PSS signature. hashed is the result of hashing the input
// message using the given hash function and sig is the signature. A valid
// signature is indicated by returning a nil error. The opts argument may be nil,
// in which case sensible defaults are used.
// VerifyPSS认证一个PSS签名。hashed是使用提供给本函数的hash参数对(要签名的)原始数据进行hash的结果。合法的签名会返回nil,否则表示签名不合法。opts参数可以为nil,此时会使用默认参数。
func VerifyPSS(pub *PublicKey, hash crypto.Hash, hashed []byte, sig []byte, opts *PSSOptions) error
// CRTValue contains the precomputed chinese remainder theorem values.
// CRTValue包含预先计算的中国剩余定理的值。
type CRTValue struct {
Exp *big.Int // D mod (prime-1).
Coeff *big.Int // R·Coeff ≡ 1 mod Prime.
R *big.Int // product of primes prior to this (inc p and q).
}
// PSSOptions contains options for creating and verifying PSS signatures.
// PSSOptions包含用于创建和认证PSS签名的参数。
type PSSOptions struct {
// SaltLength controls the length of the salt used in the PSS
// signature. It can either be a number of bytes, or one of the special
// PSSSaltLength constants.
SaltLength int
// Hash, if not zero, overrides the hash function passed to SignPSS.
// This is the only way to specify the hash function when using the
// crypto.Signer interface.
Hash crypto.Hash
}
// HashFunc returns pssOpts.Hash so that PSSOptions implements crypto.SignerOpts.
func (pssOpts *PSSOptions) HashFunc() crypto.Hash
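// A rough PSS sketch added for illustration (not part of the original documentation),
// reusing the assumed `priv` and SHA-256 digest `hashed` from the sketches above;
// PSSSaltLengthAuto lets the library choose the salt length:
//
//	opts := &rsa.PSSOptions{SaltLength: rsa.PSSSaltLengthAuto, Hash: crypto.SHA256}
//	sig, err := rsa.SignPSS(rand.Reader, priv, crypto.SHA256, hashed[:], opts)
//	err = rsa.VerifyPSS(&priv.PublicKey, crypto.SHA256, hashed[:], sig, opts)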
type PrecomputedValues struct {
Dp, Dq *big.Int // D mod (P-1) (or mod Q-1)
Qinv *big.Int // Q^-1 mod P
// CRTValues is used for the 3rd and subsequent primes. Due to a
// historical accident, the CRT for the first two primes is handled
// differently in PKCS#1 and interoperability is sufficiently
// important that we mirror this.
CRTValues []CRTValue
}
// A PrivateKey represents an RSA key
// 代表一个RSA私钥。
type PrivateKey struct {
PublicKey // public part.
D *big.Int // private exponent
Primes []*big.Int // prime factors of N, has >= 2 elements.
// Precomputed contains precomputed values that | // attacks.
// DecryptPKCS1v15使用PKCS#1
// v1.5规定的填充方案和RSA算法解密密文。如果random不是nil,函数会注意规避时间侧信道攻击。 | random_line_split |
doc_zh_CN.go | TooLong is returned when attempting to encrypt a message which is too
// large for the size of the public key.
var ErrMessageTooLong = errors.New("crypto/rsa: message too long for RSA public key size")
// ErrVerification represents a failure to verify a signature. It is deliberately
// vague to avoid adaptive attacks.
var ErrVerification = errors.New("crypto/rsa: verification error")
// DecryptOAEP decrypts ciphertext using RSA-OAEP. If random != nil, DecryptOAEP
// uses RSA blinding to avoid timing side-channel attacks.
// DecryptOAEP解密RSA-OAEP算法加密的数据。如果random不是nil,函数会注意规避时间侧信道攻击。
func DecryptOAEP(hash hash.Hash, random io.Reader, priv *PrivateKey, ciphertext []byte, label []byte) (msg []byte, err error)
// DecryptPKCS1v15 decrypts a plaintext using RSA and the padding scheme from
// PKCS#1 v1.5. If rand != nil, it uses RSA blinding to avoid timing side-channel
// attacks.
// DecryptPKCS1v15使用PKCS#1
// v1.5规定的填充方案和RSA算法解密密文。如果random不是nil,函数会注意规避时间侧信道攻击。
func DecryptPKCS1v15(rand io.Reader, priv *PrivateKey, ciphertext []byte) (out []byte, err error)
// DecryptPKCS1v15SessionKey decrypts a session key using RSA and the padding
// scheme from PKCS#1 v1.5. If rand != nil, it uses RSA blinding to avoid timing
// side-channel attacks. It returns an error if the ciphertex | length
// or if the ciphertext is greater than the public modulus. Otherwise, no error is
// returned. If the padding is valid, the resulting plaintext message is copied
// into key. Otherwise, key is unchanged. These alternatives occur in constant
// time. It is intended that the user of this function generate a random session
// key beforehand and continue the protocol with the resulting value. This will
// remove any possibility that an attacker can learn any information about the
// plaintext. See ``Chosen Ciphertext Attacks Against Protocols Based on the RSA
// Encryption Standard PKCS #1'', Daniel Bleichenbacher, Advances in Cryptology
// (Crypto '98).
// DecryptPKCS1v15SessionKey使用PKCS#1
// v1.5规定的填充方案和RSA算法解密会话密钥。如果random不是nil,函数会注意规避时间侧信道攻击。
//
// 如果密文长度不对,或者如果密文比公共模数的长度还长,会返回错误;否则,不会返回任何错误。如果填充是合法的,生成的明文信息会拷贝进key;否则,key不会被修改。这些情况都会在固定时间内出现(规避时间侧信道攻击)。本函数的目的是让程序的使用者事先生成一个随机的会话密钥,并用运行时的值继续协议。这样可以避免任何攻击者从明文窃取信息的可能性。
//
// 参见”Chosen Ciphertext Attacks Against Protocols Based on the RSA Encryption
// Standard PKCS #1”。
func DecryptPKCS1v15SessionKey(rand io.Reader, priv *PrivateKey, ciphertext []byte, key []byte) (err error)
// EncryptOAEP encrypts the given message with RSA-OAEP. The message must be no
// longer than the length of the public modulus less twice the hash length plus 2.
// 采用RSA-OAEP算法加密给出的数据。数据不能超过((公共模数的长度)-2*( hash长度)+2)字节。
func EncryptOAEP(hash hash.Hash, random io.Reader, pub *PublicKey, msg []byte, label []byte) (out []byte, err error)
// EncryptPKCS1v15 encrypts the given message with RSA and the padding scheme from
// PKCS#1 v1.5. The message must be no longer than the length of the public modulus
// minus 11 bytes. WARNING: use of this function to encrypt plaintexts other than
// session keys is dangerous. Use RSA OAEP in new protocols.
// EncryptPKCS1v15使用PKCS#1
// v1.5规定的填充方案和RSA算法加密msg。信息不能超过((公共模数的长度)-11)字节。注意:使用本函数加密明文(而不是会话密钥)是危险的,请尽量在新协议中使用RSA
// OAEP。
func EncryptPKCS1v15(rand io.Reader, pub *PublicKey, msg []byte) (out []byte, err error)
// SignPKCS1v15 calculates the signature of hashed using RSASSA-PKCS1-V1_5-SIGN
// from RSA PKCS#1 v1.5. Note that hashed must be the result of hashing the input
// message using the given hash function. If hash is zero, hashed is signed
// directly. This isn't advisable except for interoperability.
// SignPKCS1v15使用RSA PKCS#1
// v1.5规定的RSASSA-PKCS1-V1_5-SIGN签名方案计算签名。注意hashed必须是使用提供给本函数的hash参数对(要签名的)原始数据进行hash的结果。
func SignPKCS1v15(rand io.Reader, priv *PrivateKey, hash crypto.Hash, hashed []byte) (s []byte, err error)
// SignPSS calculates the signature of hashed using RSASSA-PSS [1]. Note that
// hashed must be the result of hashing the input message using the given hash
// function. The opts argument may be nil, in which case sensible defaults are
// used.
// SignPSS采用RSASSA-PSS方案计算签名。注意hashed必须是使用提供给本函数的hash参数对(要签名的)原始数据进行hash的结果。opts参数可以为nil,此时会使用默认参数。
func SignPSS(rand io.Reader, priv *PrivateKey, hash crypto.Hash, hashed []byte, opts *PSSOptions) (s []byte, err error)
// VerifyPKCS1v15 verifies an RSA PKCS#1 v1.5 signature. hashed is the result of
// hashing the input message using the given hash function and sig is the
// signature. A valid signature is indicated by returning a nil error. If hash is
// zero then hashed is used directly. This isn't advisable except for
// interoperability.
// VerifyPKCS1v15认证RSA PKCS#1
// v1.5签名。hashed是使用提供的hash参数对(要签名的)原始数据进行hash的结果。合法的签名会返回nil,否则表示签名不合法。
func VerifyPKCS1v15(pub *PublicKey, hash crypto.Hash, hashed []byte, sig []byte) (err error)
// VerifyPSS verifies a PSS signature. hashed is the result of hashing the input
// message using the given hash function and sig is the signature. A valid
// signature is indicated by returning a nil error. The opts argument may be nil,
// in which case sensible defaults are used.
// VerifyPSS认证一个PSS签名。hashed是使用提供给本函数的hash参数对(要签名的)原始数据进行hash的结果。合法的签名会返回nil,否则表示签名不合法。opts参数可以为nil,此时会使用默认参数。
func VerifyPSS(pub *PublicKey, hash crypto.Hash, hashed []byte, sig []byte, opts *PSSOptions) error
// CRTValue contains the precomputed chinese remainder theorem values.
// CRTValue包含预先计算的中国剩余定理的值。
type CRTValue struct {
Exp *big.Int // D mod (prime-1).
Coeff *big.Int // R·Coeff ≡ 1 mod Prime.
R *big.Int // product of primes prior to this (inc p and q).
}
// PSSOptions contains options for creating and verifying PSS signatures.
// PSSOptions包含用于创建和认证PSS签名的参数。
type PSSOptions struct {
// SaltLength controls the length of the salt used in the PSS
// signature. It can either be a number of bytes, or one of the special
// PSSSaltLength constants.
SaltLength int
// Hash, if not zero, overrides the hash function passed to SignPSS.
// This is the only way to specify the hash function when using the
// crypto.Signer interface.
Hash crypto.Hash
}
// HashFunc returns pssOpts.Hash so that PSSOptions implements crypto.SignerOpts.
func (pssOpts *PSSOptions) HashFunc() crypto.Hash
type PrecomputedValues struct {
Dp, Dq *big.Int // D mod (P-1) (or mod Q-1)
Qinv *big.Int // Q^-1 mod P
// CRTValues is used for the 3rd and subsequent primes. Due to a
// historical accident, the CRT for the first two primes is handled
// differently in PKCS#1 and interoperability is sufficiently
// important that we mirror this.
CRTValues []CRTValue
}
// A PrivateKey represents an RSA key
// 代表一个RSA私钥。
type PrivateKey struct {
PublicKey // public part.
D *big.Int // private exponent
Primes []*big.Int // prime factors of N, has >= 2 elements.
// Precomputed contains | t is the wrong | identifier_name |
types.rs | <T: Encodable + Decodable + Send + Clone> {
current_term: u64,
voted_for: Option<u64>, // request_vote cares if this is `None`
log: File,
last_index: u64, // The last index of the file.
last_term: u64, // The term of the last entry in the file.
marker: marker::PhantomData<T>, // A marker... Because of
// https://github.com/rust-lang/rfcs/blob/master/text/0738-variance.md#the-corner-case-unused-parameters-and-parameters-that-are-only-used-unsafely
}
impl<T: Encodable + Decodable + Send + Clone> PersistentState<T> {
pub fn new(current_term: u64, log_path: Path) -> PersistentState<T> {
let mut open_opts = OpenOptions::new();
open_opts.read(true);
open_opts.write(true);
open_opts.create(true);
let mut file = open_opts.open(&log_path).unwrap();
write!(&mut file, "{:20} {:20}\n", current_term, 0).unwrap();
PersistentState {
current_term: current_term,
voted_for: None,
log: file,
last_index: 0,
last_term: 0,
marker: marker::PhantomData,
}
}
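// Editor's note on the on-disk layout (derived from the write! calls above and in
// append_entries): the first line of the log file is a fixed-width header holding
// "current_term voted_for", and every subsequent line is one log entry of the form
// "<term> <base64(json(entry))>". move_to/purge_from_index operate on these
// newline-separated lines.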
/// Gets the `last_index` which you can use to make append requests with.
pub fn get_last_index(&self) -> u64 { self.last_index }
pub fn get_last_term(&self) -> u64 { self.last_term }
/// Gets the `current_term` which is used for request vote.
pub fn get_current_term(&self) -> u64 { self.current_term }
/// Sets the current_term. **This should reflect on stable storage.**
pub fn set_current_term(&mut self, new: u64) -> io::Result<()> {
// The first line is the header with `current_term`, `voted_for`.
self.log.seek(io::SeekFrom::Start(0)); // Take the start.
self.current_term = new;
// TODO: What do we do about the none case?
write!(&mut self.log, "{:20} {:20}\n", self.current_term, self.voted_for.unwrap_or(0))
}
/// Increments the current_term. **This should reflect on stable storage.**
pub fn inc_current_term(&mut self) { self.current_term += 1 }
/// Gets the `voted_for`.
pub fn get_voted_for(&mut self) -> Option<u64> { self.voted_for }
/// Sets the `voted_for. **This should reflect on stable storage.**
pub fn set_voted_for(&mut self, new: Option<u64>) -> io::Result<()> {
// The first line is the header with `current_term`, `voted_for`.
self.log.seek(io::SeekFrom::Start(0)); // Take the start.
self.voted_for = new;
// TODO: What do we do about the none case?
write!(&mut self.log, "{:20} {:20}\n", self.current_term, self.voted_for.unwrap_or(0))
}
pub fn append_entries(&mut self, prev_log_index: u64, prev_log_term: u64,
entries: Vec<(u64, T)>) -> io::Result<()> {
// TODO: No checking of `prev_log_index` & `prev_log_term` yet... Do we need to?
let position = try!(self.move_to(prev_log_index + 1));
let number = entries.len();
let last_term = entries[entries.len() -1].0;
try!(self.purge_from_bytes(position)); // Update `last_log_index` later.
// TODO: Possibly purge.
for (term, entry) in entries {
// TODO: I don't like the "doubling" here. How can we do this better?
write!(&mut self.log, "{} {}\n", term, PersistentState::encode(entry));
}
self.last_index = if prev_log_index == 0 {
number as u64 - 1
} else { prev_log_index + number as u64 };
self.last_term = last_term;
Ok(())
}
fn encode(entry: T) -> String {
let json_encoded = json::encode(&entry)
.unwrap(); // TODO: Don't unwrap.
json_encoded.as_bytes().to_base64(Config {
char_set: CharacterSet::UrlSafe,
newline: Newline::LF,
pad: false,
line_length: None,
})
}
fn decode(bytes: String) -> Result<T, rustc_serialize::json::DecoderError> {
let based = bytes.from_base64()
.ok().expect("Decoding error. log likely corrupt.");
let string = str::from_utf8(based.as_slice())
.unwrap();
json::decode::<T>(string)
}
/// Returns the number of bytes containing `line` lines.
/// TODO: Cache?
fn move_to(&mut self, line: u64) -> io::Result<u64> {
// Gotcha: The first line is NOT a log entry.
let mut lines_read = 0u64;
self.log.seek(io::SeekFrom::Start(0)); // Take the start.
// Go until we've reached `from` new lines.
let _ = self.log.by_ref().chars().skip_while(|opt| {
match *opt {
Ok(val) => {
if val == '\n' {
lines_read += 1;
if lines_read > line { // Greater than here because the first line is a bust.
false // At right location.
} else {
true // Not done yet, more lines to go.
}
} else {
true // Not a new line.
}
},
_ => false // At EOF. Nothing to purge.
}
}).next(); // Side effects.
self.log.seek(io::SeekFrom::Current(0)) // Where are we?
}
/// Do *not* invoke this unless you update the `last_index`!
fn purge_from_bytes(&mut self, from_bytes: u64) -> io::Result<()> {
self.log.set_len(from_bytes) // Chop off the file at the given position.
}
/// Removes all entries from `from` to the last entry, inclusively.
pub fn purge_from_index(&mut self, from_line: u64) -> io::Result<()> {
let position = try!(self.move_to(from_line));
self.last_index = from_line - 1;
self.purge_from_bytes(position)
}
pub fn retrieve_entries(&mut self, start: u64, end: u64) -> io::Result<Vec<(u64, T)>> {
let position = self.move_to(start);
let mut index = start;
let mut out = vec![];
let mut read_in = self.log.by_ref()
.chars()
.take_while(|val| val.is_ok())
.filter_map(|val| val.ok()); // We don't really care about issues here.
for index in range(start, end +1) {
let mut chars = read_in.by_ref()
.take_while(|&val| val != '\n')
.collect::<String>();
if chars.len() == 0 { continue; }
let entry = try!(parse_entry::<T>(chars));
out.push(entry);
}
Ok(out)
}
pub fn retrieve_entry(&mut self, index: u64) -> io::Result<(u64, T)> {
let position = self.move_to(index);
let mut chars = self.log.by_ref()
.chars()
.take_while(|val| val.is_ok())
.filter_map(|val| val.ok()) // We don't really care about issues here.
.take_while(|&val| val != '\n').collect::<String>();
parse_entry::<T>(chars)
}
}
fn parse_entry<T: Encodable + Decodable + Send + Clone>(val: String) -> io::Result<(u64, T)> {
let mut splits = val.split(' ');
let term = {
let chunk = splits.next()
.and_then(|v| v.parse::<u64>().ok());
match chunk {
Some(v) => v,
None => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not parse term.", None)),
}
};
let encoded = {
let chunk = splits.next();
match chunk {
Some(v) => v,
None => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not parse encoded data.", None)),
}
};
let decoded: T = PersistentState::decode(encoded.to_string())
.ok().expect("Could not unwrap log entry.");
Ok((term, decoded))
}
/// Volatile state
#[derive(Copy)]
pub struct VolatileState {
pub commit_index: u64,
pub last_applied: u64
}
/// Leader Only
/// **Reinitialized after election.**
#[derive(PartialEq, Eq, Clone)]
pub struct LeaderState {
pub next_index: Vec<u64>,
pub match_index: Vec | PersistentState | identifier_name |
|
types.rs | pub fn append_entries(&mut self, prev_log_index: u64, prev_log_term: u64,
entries: Vec<(u64, T)>) -> io::Result<()> {
// TODO: No checking of `prev_log_index` & `prev_log_term` yet... Do we need to?
let position = try!(self.move_to(prev_log_index + 1));
let number = entries.len();
let last_term = entries[entries.len() -1].0;
try!(self.purge_from_bytes(position)); // Update `last_log_index` later.
// TODO: Possibly purge.
for (term, entry) in entries {
// TODO: I don't like the "doubling" here. How can we do this better?
write!(&mut self.log, "{} {}\n", term, PersistentState::encode(entry));
}
self.last_index = if prev_log_index == 0 {
number as u64 - 1
} else { prev_log_index + number as u64 };
self.last_term = last_term;
Ok(())
}
fn encode(entry: T) -> String {
let json_encoded = json::encode(&entry)
.unwrap(); // TODO: Don't unwrap.
json_encoded.as_bytes().to_base64(Config {
char_set: CharacterSet::UrlSafe,
newline: Newline::LF,
pad: false,
line_length: None,
})
}
fn decode(bytes: String) -> Result<T, rustc_serialize::json::DecoderError> {
let based = bytes.from_base64()
.ok().expect("Decoding error. log likely corrupt.");
let string = str::from_utf8(based.as_slice())
.unwrap();
json::decode::<T>(string)
}
/// Returns the number of bytes containing `line` lines.
/// TODO: Cache?
fn move_to(&mut self, line: u64) -> io::Result<u64> {
// Gotcha: The first line is NOT a log entry.
let mut lines_read = 0u64;
self.log.seek(io::SeekFrom::Start(0)); // Take the start.
// Go until we've reached `from` new lines.
let _ = self.log.by_ref().chars().skip_while(|opt| {
match *opt {
Ok(val) => {
if val == '\n' {
lines_read += 1;
if lines_read > line { // Greater than here because the first line is a bust.
false // At right location.
} else {
true // Not done yet, more lines to go.
}
} else {
true // Not a new line.
}
},
_ => false // At EOF. Nothing to purge.
}
}).next(); // Side effects.
self.log.seek(io::SeekFrom::Current(0)) // Where are we?
}
/// Do *not* invoke this unless you update the `last_index`!
fn purge_from_bytes(&mut self, from_bytes: u64) -> io::Result<()> {
self.log.set_len(from_bytes) // Chop off the file at the given position.
}
/// Removes all entries from `from` to the last entry, inclusively.
pub fn purge_from_index(&mut self, from_line: u64) -> io::Result<()> {
let position = try!(self.move_to(from_line));
self.last_index = from_line - 1;
self.purge_from_bytes(position)
}
pub fn retrieve_entries(&mut self, start: u64, end: u64) -> io::Result<Vec<(u64, T)>> {
let position = self.move_to(start);
let mut index = start;
let mut out = vec![];
let mut read_in = self.log.by_ref()
.chars()
.take_while(|val| val.is_ok())
.filter_map(|val| val.ok()); // We don't really care about issues here.
for index in range(start, end +1) {
let mut chars = read_in.by_ref()
.take_while(|&val| val != '\n')
.collect::<String>();
if chars.len() == 0 { continue; }
let entry = try!(parse_entry::<T>(chars));
out.push(entry);
}
Ok(out)
}
pub fn retrieve_entry(&mut self, index: u64) -> io::Result<(u64, T)> {
let position = self.move_to(index);
let mut chars = self.log.by_ref()
.chars()
.take_while(|val| val.is_ok())
.filter_map(|val| val.ok()) // We don't really care about issues here.
.take_while(|&val| val != '\n').collect::<String>();
parse_entry::<T>(chars)
}
}
fn parse_entry<T: Encodable + Decodable + Send + Clone>(val: String) -> io::Result<(u64, T)> {
let mut splits = val.split(' ');
let term = {
let chunk = splits.next()
.and_then(|v| v.parse::<u64>().ok());
match chunk {
Some(v) => v,
None => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not parse term.", None)),
}
};
let encoded = {
let chunk = splits.next();
match chunk {
Some(v) => v,
None => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not parse encoded data.", None)),
}
};
let decoded: T = PersistentState::decode(encoded.to_string())
.ok().expect("Could not unwrap log entry.");
Ok((term, decoded))
}
/// Volatile state
#[derive(Copy)]
pub struct VolatileState {
pub commit_index: u64,
pub last_applied: u64
}
/// Leader Only
/// **Reinitialized after election.**
#[derive(PartialEq, Eq, Clone)]
pub struct LeaderState {
pub next_index: Vec<u64>,
pub match_index: Vec<u64>
}
/// Nodes can either be:
///
/// * A `Follower`, which replicates AppendEntries requests and votes for it's leader.
/// * A `Leader`, which leads the cluster by serving incoming requests, ensuring data is
/// replicated, and issuing heartbeats..
/// * A `Candidate`, which campaigns in an election and may become a `Leader` (if it gets enough
/// votes) or a `Follower`, if it hears from a `Leader`.
#[derive(PartialEq, Eq, Clone)]
pub enum NodeState {
Follower,
Leader(LeaderState),
Candidate(Vec<Transaction>),
}
#[derive(PartialEq, Eq, Clone)]
pub struct Transaction {
pub uuid: Uuid,
pub state: TransactionState,
}
/// Used to signify the state of of a Request/Response pair. This is only needed
/// on the original sender... not on the reciever.
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum TransactionState {
Polling,
Accepted,
Rejected,
}
#[test]
fn test_persistent_state() {
let path = Path::new("/tmp/test_path");
fs::remove_file(&path.clone());
let mut state = PersistentState::new(0, path.clone());
// Add 0, 1
assert_eq!(state.append_entries(0, 0,
vec![(0, "Zero".to_string()),
(1, "One".to_string())]),
Ok(()));
// Check index.
assert_eq!(state.get_last_index(), 1);
// Check 0
assert_eq!(state.retrieve_entry(0),
Ok((0, "Zero".to_string())));
// Check 0, 1
assert_eq!(state.retrieve_entries(0, 1),
Ok(vec![(0, "Zero".to_string()),
(1, "One".to_string())
]));
// Check 1
assert_eq!(state.retrieve_entry(1),
Ok((1, "One".to_string())));
// Add 2, 3
assert_eq!(state.append_entries(1, 2,
vec![(2, "Two".to_string()),
(3, "Three".to_string())]),
Ok(()));
assert_eq!(state.get_last_index(), 3);
// Check 2, 3
assert_eq!(state.retrieve_entries(2, 3),
Ok(vec![(2, "Two".to_string()),
(3, "Three".to_string())
]));
// Remove 2, 3
assert_eq!(state.purge_from_index(2),
Ok(()));
assert_eq!(state.get_last_index(), 1);
// Check 3,4 are removed, and that code handles lack of entry gracefully.
assert_eq!(state.retrieve_entries(0, 4),
Ok(vec![(0, "Zero".to_string()),
(1, "One".to_string())
]));
// Add 2,3,4.
assert_eq!(state.append_entries(1, 2,
vec![(2, "Two".to_string()), | (3, "Three".to_string()),
(4, "Four".to_string())]),
Ok(()));
assert_eq!(state.get_last_index(), 4); | random_line_split |
|
types.rs | #the-corner-case-unused-parameters-and-parameters-that-are-only-used-unsafely
}
impl<T: Encodable + Decodable + Send + Clone> PersistentState<T> {
pub fn new(current_term: u64, log_path: Path) -> PersistentState<T> {
let mut open_opts = OpenOptions::new();
open_opts.read(true);
open_opts.write(true);
open_opts.create(true);
let mut file = open_opts.open(&log_path).unwrap();
write!(&mut file, "{:20} {:20}\n", current_term, 0).unwrap();
PersistentState {
current_term: current_term,
voted_for: None,
log: file,
last_index: 0,
last_term: 0,
marker: marker::PhantomData,
}
}
/// Gets the `last_index` which you can use to make append requests with.
pub fn get_last_index(&self) -> u64 { self.last_index }
pub fn get_last_term(&self) -> u64 { self.last_term }
/// Gets the `current_term` which is used for request vote.
pub fn get_current_term(&self) -> u64 { self.current_term }
/// Sets the current_term. **This should reflect on stable storage.**
pub fn set_current_term(&mut self, new: u64) -> io::Result<()> {
// The first line is the header with `current_term`, `voted_for`.
self.log.seek(io::SeekFrom::Start(0)); // Take the start.
self.current_term = new;
// TODO: What do we do about the none case?
write!(&mut self.log, "{:20} {:20}\n", self.current_term, self.voted_for.unwrap_or(0))
}
/// Increments the current_term. **This should reflect on stable storage.**
pub fn inc_current_term(&mut self) { self.current_term += 1 }
/// Gets the `voted_for`.
pub fn get_voted_for(&mut self) -> Option<u64> { self.voted_for }
/// Sets the `voted_for. **This should reflect on stable storage.**
pub fn set_voted_for(&mut self, new: Option<u64>) -> io::Result<()> {
// The first line is the header with `current_term`, `voted_for`.
self.log.seek(io::SeekFrom::Start(0)); // Take the start.
self.voted_for = new;
// TODO: What do we do about the none case?
write!(&mut self.log, "{:20} {:20}\n", self.current_term, self.voted_for.unwrap_or(0))
}
pub fn append_entries(&mut self, prev_log_index: u64, prev_log_term: u64,
entries: Vec<(u64, T)>) -> io::Result<()> {
// TODO: No checking of `prev_log_index` & `prev_log_term` yet... Do we need to?
let position = try!(self.move_to(prev_log_index + 1));
let number = entries.len();
let last_term = entries[entries.len() -1].0;
try!(self.purge_from_bytes(position)); // Update `last_log_index` later.
// TODO: Possibly purge.
for (term, entry) in entries {
// TODO: I don't like the "doubling" here. How can we do this better?
write!(&mut self.log, "{} {}\n", term, PersistentState::encode(entry));
}
self.last_index = if prev_log_index == 0 {
number as u64 - 1
} else | ;
self.last_term = last_term;
Ok(())
}
fn encode(entry: T) -> String {
let json_encoded = json::encode(&entry)
.unwrap(); // TODO: Don't unwrap.
json_encoded.as_bytes().to_base64(Config {
char_set: CharacterSet::UrlSafe,
newline: Newline::LF,
pad: false,
line_length: None,
})
}
fn decode(bytes: String) -> Result<T, rustc_serialize::json::DecoderError> {
let based = bytes.from_base64()
.ok().expect("Decoding error. log likely corrupt.");
let string = str::from_utf8(based.as_slice())
.unwrap();
json::decode::<T>(string)
}
/// Returns the number of bytes containing `line` lines.
/// TODO: Cache?
fn move_to(&mut self, line: u64) -> io::Result<u64> {
// Gotcha: The first line is NOT a log entry.
let mut lines_read = 0u64;
self.log.seek(io::SeekFrom::Start(0)); // Take the start.
// Go until we've reached `from` new lines.
let _ = self.log.by_ref().chars().skip_while(|opt| {
match *opt {
Ok(val) => {
if val == '\n' {
lines_read += 1;
if lines_read > line { // Greater than here because the first line is a bust.
false // At right location.
} else {
true // Not done yet, more lines to go.
}
} else {
true // Not a new line.
}
},
_ => false // At EOF. Nothing to purge.
}
}).next(); // Side effects.
self.log.seek(io::SeekFrom::Current(0)) // Where are we?
}
/// Do *not* invoke this unless you update the `last_index`!
fn purge_from_bytes(&mut self, from_bytes: u64) -> io::Result<()> {
self.log.set_len(from_bytes) // Chop off the file at the given position.
}
/// Removes all entries from `from` to the last entry, inclusively.
pub fn purge_from_index(&mut self, from_line: u64) -> io::Result<()> {
let position = try!(self.move_to(from_line));
self.last_index = from_line - 1;
self.purge_from_bytes(position)
}
pub fn retrieve_entries(&mut self, start: u64, end: u64) -> io::Result<Vec<(u64, T)>> {
let position = self.move_to(start);
let mut index = start;
let mut out = vec![];
let mut read_in = self.log.by_ref()
.chars()
.take_while(|val| val.is_ok())
.filter_map(|val| val.ok()); // We don't really care about issues here.
for index in range(start, end +1) {
let mut chars = read_in.by_ref()
.take_while(|&val| val != '\n')
.collect::<String>();
if chars.len() == 0 { continue; }
let entry = try!(parse_entry::<T>(chars));
out.push(entry);
}
Ok(out)
}
pub fn retrieve_entry(&mut self, index: u64) -> io::Result<(u64, T)> {
let position = self.move_to(index);
let mut chars = self.log.by_ref()
.chars()
.take_while(|val| val.is_ok())
.filter_map(|val| val.ok()) // We don't really care about issues here.
.take_while(|&val| val != '\n').collect::<String>();
parse_entry::<T>(chars)
}
}
fn parse_entry<T: Encodable + Decodable + Send + Clone>(val: String) -> io::Result<(u64, T)> {
let mut splits = val.split(' ');
let term = {
let chunk = splits.next()
.and_then(|v| v.parse::<u64>().ok());
match chunk {
Some(v) => v,
None => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not parse term.", None)),
}
};
let encoded = {
let chunk = splits.next();
match chunk {
Some(v) => v,
None => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not parse encoded data.", None)),
}
};
let decoded: T = PersistentState::decode(encoded.to_string())
.ok().expect("Could not unwrap log entry.");
Ok((term, decoded))
}
/// Volatile state
#[derive(Copy)]
pub struct VolatileState {
pub commit_index: u64,
pub last_applied: u64
}
/// Leader Only
/// **Reinitialized after election.**
#[derive(PartialEq, Eq, Clone)]
pub struct LeaderState {
pub next_index: Vec<u64>,
pub match_index: Vec<u64>
}
/// Nodes can either be:
///
/// * A `Follower`, which replicates AppendEntries requests and votes for it's leader.
/// * A `Leader`, which leads the cluster by serving incoming requests, ensuring data is
/// replicated, and issuing heartbeats..
/// * A `Candidate`, which campaigns in an election and may become a `Leader` (if it gets enough
/// votes) or a `Follower`, if it hears from a `Leader`.
#[derive(PartialEq, Eq, Clone)]
pub enum NodeState | { prev_log_index + number as u64 } | conditional_block |
sync.go |
}
type timestampedError struct {
t time.Time
err error
}
func createSyncHandler(fromName, toName string,
from blobserver.Storage, to blobserver.BlobReceiver,
queue sorted.KeyValue, isToIndex bool) (*SyncHandler, error) {
h := &SyncHandler{
copierPoolSize: 3,
from: from,
to: to,
fromName: fromName,
toName: toName,
queue: queue,
toIndex: isToIndex,
blobc: make(chan blob.SizedRef, 8),
status: "not started",
blobStatus: make(map[string]fmt.Stringer),
}
return h, nil
}
func createIdleSyncHandler(fromName, toName string) (*SyncHandler, error) {
h := &SyncHandler{
fromName: fromName,
toName: toName,
idle: true,
status: "disabled",
}
return h, nil
}
func (sh *SyncHandler) discoveryMap() map[string]interface{} {
// TODO(mpl): more status info
return map[string]interface{}{
"from": sh.fromName,
"to": sh.toName,
"toIndex": sh.toIndex,
}
}
func (sh *SyncHandler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
sh.lk.Lock()
defer sh.lk.Unlock()
fmt.Fprintf(rw, "<h1>%s to %s Sync Status</h1><p><b>Current status: </b>%s</p>",
sh.fromName, sh.toName, html.EscapeString(sh.status))
if sh.idle {
return
}
fmt.Fprintf(rw, "<h2>Stats:</h2><ul>")
fmt.Fprintf(rw, "<li>Blobs copied: %d</li>", sh.totalCopies)
fmt.Fprintf(rw, "<li>Bytes copied: %d</li>", sh.totalCopyBytes)
if !sh.recentCopyTime.IsZero() {
fmt.Fprintf(rw, "<li>Most recent copy: %s</li>", sh.recentCopyTime.Format(time.RFC3339))
}
fmt.Fprintf(rw, "<li>Copy errors: %d</li>", sh.totalErrors)
fmt.Fprintf(rw, "</ul>")
if len(sh.blobStatus) > 0 {
fmt.Fprintf(rw, "<h2>Current Copies:</h2><ul>")
for blobstr, sfn := range sh.blobStatus {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
blobstr, html.EscapeString(sfn.String()))
}
fmt.Fprintf(rw, "</ul>")
}
if len(sh.recentErrors) > 0 {
fmt.Fprintf(rw, "<h2>Recent Errors:</h2><ul>")
for _, te := range sh.recentErrors {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
te.t.Format(time.RFC3339),
html.EscapeString(te.err.Error()))
}
fmt.Fprintf(rw, "</ul>")
}
}
func (sh *SyncHandler) setStatus(s string, args ...interface{}) {
s = time.Now().UTC().Format(time.RFC3339) + ": " + fmt.Sprintf(s, args...)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.status = s
}
func (sh *SyncHandler) setBlobStatus(blobref string, s fmt.Stringer) {
sh.lk.Lock()
defer sh.lk.Unlock()
if s != nil {
sh.blobStatus[blobref] = s
} else {
delete(sh.blobStatus, blobref)
}
}
func (sh *SyncHandler) addErrorToLog(err error) {
sh.logf("%v", err)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.recentErrors = append(sh.recentErrors, timestampedError{time.Now().UTC(), err})
if len(sh.recentErrors) > maxErrors {
// Kinda lame, but whatever. Only for errors, rare.
copy(sh.recentErrors[:maxErrors], sh.recentErrors[1:maxErrors+1])
sh.recentErrors = sh.recentErrors[:maxErrors]
}
}
type copyResult struct {
sb blob.SizedRef
err error
}
func blobserverEnumerator(ctx *context.Context, src blobserver.BlobEnumerator) func(chan<- blob.SizedRef, <-chan struct{}) error {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
return blobserver.EnumerateAll(ctx, src, func(sb blob.SizedRef) error {
select {
case dst <- sb:
case <-intr:
return errors.New("interrupted")
}
return nil
})
}
}
func (sh *SyncHandler) enumerateQueuedBlobs(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
it := sh.queue.Find("", "")
for it.Next() {
br, ok := blob.Parse(it.Key())
size, err := strconv.ParseInt(it.Value(), 10, 64)
if !ok || err != nil {
sh.logf("ERROR: bogus sync queue entry: %q => %q", it.Key(), it.Value())
continue
}
select {
case dst <- blob.SizedRef{br, size}:
case <-intr:
return it.Close()
}
}
return it.Close()
}
func (sh *SyncHandler) enumerateBlobc(first blob.SizedRef) func(chan<- blob.SizedRef, <-chan struct{}) error {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
dst <- first
for {
select {
case sb := <-sh.blobc:
dst <- sb
default:
return nil
}
}
}
}
func (sh *SyncHandler) runSync(srcName string, enumSrc func(chan<- blob.SizedRef, <-chan struct{}) error) int {
enumch := make(chan blob.SizedRef, 8)
errch := make(chan error, 1)
intr := make(chan struct{})
defer close(intr)
go func() { errch <- enumSrc(enumch, intr) }()
nCopied := 0
toCopy := 0
workch := make(chan blob.SizedRef, 1000)
resch := make(chan copyResult, 8)
for sb := range enumch {
toCopy++
workch <- sb
if toCopy <= sh.copierPoolSize {
go sh.copyWorker(resch, workch)
}
sh.setStatus("Enumerating queued blobs: %d", toCopy)
}
close(workch)
for i := 0; i < toCopy; i++ {
sh.setStatus("Copied %d/%d of batch of queued blobs", nCopied, toCopy)
res := <-resch
sh.lk.Lock()
if res.err == nil {
nCopied++
sh.totalCopies++
sh.totalCopyBytes += res.sb.Size
sh.recentCopyTime = time.Now().UTC()
} else {
sh.totalErrors++
}
sh.lk.Unlock()
}
if err := <-errch; err != nil {
sh.addErrorToLog(fmt.Errorf("replication error for source %q, enumerate from source: %v", srcName, err))
return nCopied
}
return nCopied
}
func (sh *SyncHandler) syncQueueLoop() {
for {
t0 := time.Now()
for sh.runSync(sh.fromName, sh.enumerateQueuedBlobs) > 0 {
// Loop, before sleeping.
}
sh.setStatus("Sleeping briefly before next long poll.")
d := queueSyncInterval - time.Since(t0)
select {
case <-time.After(d):
case sb := <-sh.blobc:
// Blob arrived.
sh.runSync(sh.fromName, sh.enumerateBlobc(sb))
}
}
}
func (sh *SyncHandler) copyWorker(res chan<- copyResult, work <-chan blob.SizedRef) {
for sb := range work {
res <- copyResult{sb, sh.copyBlob(sb, 0)}
}
}
type statusFunc func() string
func (sf statusFunc) String() string { return sf() }
type status string
func (s status) String() string { return string(s) }
func (sh *SyncHandler) copyBlob(sb blob.SizedRef, tryCount int) error {
key := sb.Ref.String()
set := func(s fmt.Stringer) {
sh.setBlobStatus(key, s)
}
defer set(nil)
errorf := func(s string, args ...interface{}) error {
// TODO: increment error stats
err := fmt.Errorf("replication error for blob %s: "+s,
append([]interface{}{sb.Ref}, args...)...)
sh.addErrorToLog(err)
return err
}
set(status("sending GET to source"))
rc, fromSize, err := sh.from.FetchStreaming(sb.Ref)
if err != nil {
return errorf("source fetch: %v", err)
}
defer rc.Close()
if fromSize != sb.Size | {
return errorf("source fetch size mismatch: get=%d, enumerate=%d", fromSize, sb.Size)
} | conditional_block |
|
sync.go | ) String() string {
return fmt.Sprintf("[SyncHandler %v -> %v]", sh.fromName, sh.toName)
}
func (sh *SyncHandler) logf(format string, args ...interface{}) {
log.Printf(sh.String()+" "+format, args...)
}
var (
_ blobserver.Storage = (*SyncHandler)(nil)
_ blobserver.HandlerIniter = (*SyncHandler)(nil)
)
func init() {
blobserver.RegisterHandlerConstructor("sync", newSyncFromConfig)
}
func newSyncFromConfig(ld blobserver.Loader, conf jsonconfig.Obj) (http.Handler, error) {
var (
from = conf.RequiredString("from")
to = conf.RequiredString("to")
fullSync = conf.OptionalBool("fullSyncOnStart", false)
blockFullSync = conf.OptionalBool("blockingFullSyncOnStart", false)
idle = conf.OptionalBool("idle", false)
queueConf = conf.OptionalObject("queue")
)
if err := conf.Validate(); err != nil {
return nil, err
}
if idle {
synch, err := createIdleSyncHandler(from, to)
if err != nil {
return nil, err
}
return synch, nil
}
if len(queueConf) == 0 {
return nil, errors.New(`Missing required "queue" object`)
}
q, err := sorted.NewKeyValue(queueConf)
if err != nil {
return nil, err
}
isToIndex := false
fromBs, err := ld.GetStorage(from)
if err != nil {
return nil, err
}
toBs, err := ld.GetStorage(to)
if err != nil {
return nil, err
}
if _, ok := fromBs.(*index.Index); !ok {
if _, ok := toBs.(*index.Index); ok {
isToIndex = true
}
}
sh, err := createSyncHandler(from, to, fromBs, toBs, q, isToIndex)
if err != nil {
return nil, err
}
if fullSync || blockFullSync {
didFullSync := make(chan bool, 1)
go func() {
n := sh.runSync("queue", sh.enumerateQueuedBlobs)
sh.logf("Queue sync copied %d blobs", n)
n = sh.runSync("full", blobserverEnumerator(context.TODO(), fromBs))
sh.logf("Full sync copied %d blobs", n)
didFullSync <- true
sh.syncQueueLoop()
}()
if blockFullSync {
sh.logf("Blocking startup, waiting for full sync from %q to %q", from, to)
<-didFullSync
sh.logf("Full sync complete.")
}
} else {
go sh.syncQueueLoop()
}
blobserver.GetHub(fromBs).AddReceiveHook(sh.enqueue)
return sh, nil
}
func (sh *SyncHandler) InitHandler(hl blobserver.FindHandlerByTyper) error {
_, h, err := hl.FindHandlerByType("root")
if err == blobserver.ErrHandlerTypeNotFound {
// It's optional. We register ourselves if it's there.
return nil
}
if err != nil {
return err
}
h.(*RootHandler).registerSyncHandler(sh)
return nil
}
type timestampedError struct {
t time.Time
err error
}
func createSyncHandler(fromName, toName string,
from blobserver.Storage, to blobserver.BlobReceiver,
queue sorted.KeyValue, isToIndex bool) (*SyncHandler, error) {
h := &SyncHandler{
copierPoolSize: 3,
from: from,
to: to,
fromName: fromName,
toName: toName,
queue: queue,
toIndex: isToIndex,
blobc: make(chan blob.SizedRef, 8),
status: "not started",
blobStatus: make(map[string]fmt.Stringer),
}
return h, nil
}
func createIdleSyncHandler(fromName, toName string) (*SyncHandler, error) {
h := &SyncHandler{
fromName: fromName,
toName: toName,
idle: true,
status: "disabled",
}
return h, nil
}
func (sh *SyncHandler) discoveryMap() map[string]interface{} {
// TODO(mpl): more status info
return map[string]interface{}{
"from": sh.fromName,
"to": sh.toName,
"toIndex": sh.toIndex,
}
}
func (sh *SyncHandler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
sh.lk.Lock()
defer sh.lk.Unlock()
| if sh.idle {
return
}
fmt.Fprintf(rw, "<h2>Stats:</h2><ul>")
fmt.Fprintf(rw, "<li>Blobs copied: %d</li>", sh.totalCopies)
fmt.Fprintf(rw, "<li>Bytes copied: %d</li>", sh.totalCopyBytes)
if !sh.recentCopyTime.IsZero() {
fmt.Fprintf(rw, "<li>Most recent copy: %s</li>", sh.recentCopyTime.Format(time.RFC3339))
}
fmt.Fprintf(rw, "<li>Copy errors: %d</li>", sh.totalErrors)
fmt.Fprintf(rw, "</ul>")
if len(sh.blobStatus) > 0 {
fmt.Fprintf(rw, "<h2>Current Copies:</h2><ul>")
for blobstr, sfn := range sh.blobStatus {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
blobstr, html.EscapeString(sfn.String()))
}
fmt.Fprintf(rw, "</ul>")
}
if len(sh.recentErrors) > 0 {
fmt.Fprintf(rw, "<h2>Recent Errors:</h2><ul>")
for _, te := range sh.recentErrors {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
te.t.Format(time.RFC3339),
html.EscapeString(te.err.Error()))
}
fmt.Fprintf(rw, "</ul>")
}
}
func (sh *SyncHandler) setStatus(s string, args ...interface{}) {
s = time.Now().UTC().Format(time.RFC3339) + ": " + fmt.Sprintf(s, args...)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.status = s
}
func (sh *SyncHandler) setBlobStatus(blobref string, s fmt.Stringer) {
sh.lk.Lock()
defer sh.lk.Unlock()
if s != nil {
sh.blobStatus[blobref] = s
} else {
delete(sh.blobStatus, blobref)
}
}
func (sh *SyncHandler) addErrorToLog(err error) {
sh.logf("%v", err)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.recentErrors = append(sh.recentErrors, timestampedError{time.Now().UTC(), err})
if len(sh.recentErrors) > maxErrors {
// Kinda lame, but whatever. Only for errors, rare.
copy(sh.recentErrors[:maxErrors], sh.recentErrors[1:maxErrors+1])
sh.recentErrors = sh.recentErrors[:maxErrors]
}
}
type copyResult struct {
sb blob.SizedRef
err error
}
func blobserverEnumerator(ctx *context.Context, src blobserver.BlobEnumerator) func(chan<- blob.SizedRef, <-chan struct{}) error {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
return blobserver.EnumerateAll(ctx, src, func(sb blob.SizedRef) error {
select {
case dst <- sb:
case <-intr:
return errors.New("interrupted")
}
return nil
})
}
}
func (sh *SyncHandler) enumerateQueuedBlobs(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
it := sh.queue.Find("", "")
for it.Next() {
br, ok := blob.Parse(it.Key())
size, err := strconv.ParseInt(it.Value(), 10, 64)
if !ok || err != nil {
sh.logf("ERROR: bogus sync queue entry: %q => %q", it.Key(), it.Value())
continue
}
select {
case dst <- blob.SizedRef{br, size}:
case <-intr:
return it.Close()
}
}
return it.Close()
}
func (sh *SyncHandler) enumerateBlobc(first blob.SizedRef) func(chan<- blob.SizedRef, <-chan struct{}) error {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
dst <- first
for {
select {
case sb := <-sh.blobc:
dst <- sb
default:
return nil
}
}
}
}
func (sh *SyncHandler) runSync(srcName string, enumSrc func(chan<- blob.SizedRef, <-chan struct{}) error) int {
enumch := make(chan blob.SizedRef, 8 | fmt.Fprintf(rw, "<h1>%s to %s Sync Status</h1><p><b>Current status: </b>%s</p>",
sh.fromName, sh.toName, html.EscapeString(sh.status)) | random_line_split |
sync.go | ) String() string {
return fmt.Sprintf("[SyncHandler %v -> %v]", sh.fromName, sh.toName)
}
func (sh *SyncHandler) logf(format string, args ...interface{}) {
log.Printf(sh.String()+" "+format, args...)
}
var (
_ blobserver.Storage = (*SyncHandler)(nil)
_ blobserver.HandlerIniter = (*SyncHandler)(nil)
)
func init() {
blobserver.RegisterHandlerConstructor("sync", newSyncFromConfig)
}
func newSyncFromConfig(ld blobserver.Loader, conf jsonconfig.Obj) (http.Handler, error) {
var (
from = conf.RequiredString("from")
to = conf.RequiredString("to")
fullSync = conf.OptionalBool("fullSyncOnStart", false)
blockFullSync = conf.OptionalBool("blockingFullSyncOnStart", false)
idle = conf.OptionalBool("idle", false)
queueConf = conf.OptionalObject("queue")
)
if err := conf.Validate(); err != nil {
return nil, err
}
if idle {
synch, err := createIdleSyncHandler(from, to)
if err != nil {
return nil, err
}
return synch, nil
}
if len(queueConf) == 0 {
return nil, errors.New(`Missing required "queue" object`)
}
q, err := sorted.NewKeyValue(queueConf)
if err != nil {
return nil, err
}
isToIndex := false
fromBs, err := ld.GetStorage(from)
if err != nil {
return nil, err
}
toBs, err := ld.GetStorage(to)
if err != nil {
return nil, err
}
if _, ok := fromBs.(*index.Index); !ok {
if _, ok := toBs.(*index.Index); ok {
isToIndex = true
}
}
sh, err := createSyncHandler(from, to, fromBs, toBs, q, isToIndex)
if err != nil {
return nil, err
}
if fullSync || blockFullSync {
didFullSync := make(chan bool, 1)
go func() {
n := sh.runSync("queue", sh.enumerateQueuedBlobs)
sh.logf("Queue sync copied %d blobs", n)
n = sh.runSync("full", blobserverEnumerator(context.TODO(), fromBs))
sh.logf("Full sync copied %d blobs", n)
didFullSync <- true
sh.syncQueueLoop()
}()
if blockFullSync {
sh.logf("Blocking startup, waiting for full sync from %q to %q", from, to)
<-didFullSync
sh.logf("Full sync complete.")
}
} else {
go sh.syncQueueLoop()
}
blobserver.GetHub(fromBs).AddReceiveHook(sh.enqueue)
return sh, nil
}
func (sh *SyncHandler) InitHandler(hl blobserver.FindHandlerByTyper) error {
_, h, err := hl.FindHandlerByType("root")
if err == blobserver.ErrHandlerTypeNotFound {
// It's optional. We register ourselves if it's there.
return nil
}
if err != nil {
return err
}
h.(*RootHandler).registerSyncHandler(sh)
return nil
}
type timestampedError struct {
t time.Time
err error
}
func createSyncHandler(fromName, toName string,
from blobserver.Storage, to blobserver.BlobReceiver,
queue sorted.KeyValue, isToIndex bool) (*SyncHandler, error) {
h := &SyncHandler{
copierPoolSize: 3,
from: from,
to: to,
fromName: fromName,
toName: toName,
queue: queue,
toIndex: isToIndex,
blobc: make(chan blob.SizedRef, 8),
status: "not started",
blobStatus: make(map[string]fmt.Stringer),
}
return h, nil
}
func createIdleSyncHandler(fromName, toName string) (*SyncHandler, error) {
h := &SyncHandler{
fromName: fromName,
toName: toName,
idle: true,
status: "disabled",
}
return h, nil
}
func (sh *SyncHandler) discoveryMap() map[string]interface{} {
// TODO(mpl): more status info
return map[string]interface{}{
"from": sh.fromName,
"to": sh.toName,
"toIndex": sh.toIndex,
}
}
func (sh *SyncHandler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
sh.lk.Lock()
defer sh.lk.Unlock()
fmt.Fprintf(rw, "<h1>%s to %s Sync Status</h1><p><b>Current status: </b>%s</p>",
sh.fromName, sh.toName, html.EscapeString(sh.status))
if sh.idle {
return
}
fmt.Fprintf(rw, "<h2>Stats:</h2><ul>")
fmt.Fprintf(rw, "<li>Blobs copied: %d</li>", sh.totalCopies)
fmt.Fprintf(rw, "<li>Bytes copied: %d</li>", sh.totalCopyBytes)
if !sh.recentCopyTime.IsZero() {
fmt.Fprintf(rw, "<li>Most recent copy: %s</li>", sh.recentCopyTime.Format(time.RFC3339))
}
fmt.Fprintf(rw, "<li>Copy errors: %d</li>", sh.totalErrors)
fmt.Fprintf(rw, "</ul>")
if len(sh.blobStatus) > 0 {
fmt.Fprintf(rw, "<h2>Current Copies:</h2><ul>")
for blobstr, sfn := range sh.blobStatus {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
blobstr, html.EscapeString(sfn.String()))
}
fmt.Fprintf(rw, "</ul>")
}
if len(sh.recentErrors) > 0 {
fmt.Fprintf(rw, "<h2>Recent Errors:</h2><ul>")
for _, te := range sh.recentErrors {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
te.t.Format(time.RFC3339),
html.EscapeString(te.err.Error()))
}
fmt.Fprintf(rw, "</ul>")
}
}
func (sh *SyncHandler) setStatus(s string, args ...interface{}) {
s = time.Now().UTC().Format(time.RFC3339) + ": " + fmt.Sprintf(s, args...)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.status = s
}
func (sh *SyncHandler) setBlobStatus(blobref string, s fmt.Stringer) {
sh.lk.Lock()
defer sh.lk.Unlock()
if s != nil {
sh.blobStatus[blobref] = s
} else {
delete(sh.blobStatus, blobref)
}
}
func (sh *SyncHandler) addErrorToLog(err error) {
sh.logf("%v", err)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.recentErrors = append(sh.recentErrors, timestampedError{time.Now().UTC(), err})
if len(sh.recentErrors) > maxErrors {
// Kinda lame, but whatever. Only for errors, rare.
copy(sh.recentErrors[:maxErrors], sh.recentErrors[1:maxErrors+1])
sh.recentErrors = sh.recentErrors[:maxErrors]
}
}
type copyResult struct {
sb blob.SizedRef
err error
}
func | (ctx *context.Context, src blobserver.BlobEnumerator) func(chan<- blob.SizedRef, <-chan struct{}) error {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
return blobserver.EnumerateAll(ctx, src, func(sb blob.SizedRef) error {
select {
case dst <- sb:
case <-intr:
return errors.New("interrupted")
}
return nil
})
}
}
func (sh *SyncHandler) enumerateQueuedBlobs(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
it := sh.queue.Find("", "")
for it.Next() {
br, ok := blob.Parse(it.Key())
size, err := strconv.ParseInt(it.Value(), 10, 64)
if !ok || err != nil {
sh.logf("ERROR: bogus sync queue entry: %q => %q", it.Key(), it.Value())
continue
}
select {
case dst <- blob.SizedRef{br, size}:
case <-intr:
return it.Close()
}
}
return it.Close()
}
func (sh *SyncHandler) enumerateBlobc(first blob.SizedRef) func(chan<- blob.SizedRef, <-chan struct{}) error {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
dst <- first
for {
select {
case sb := <-sh.blobc:
dst <- sb
default:
return nil
}
}
}
}
func (sh *SyncHandler) runSync(srcName string, enumSrc func(chan<- blob.SizedRef, <-chan struct{}) error) int {
enumch := make(chan blob.SizedRef, | blobserverEnumerator | identifier_name |
sync.go | ) String() string {
return fmt.Sprintf("[SyncHandler %v -> %v]", sh.fromName, sh.toName)
}
func (sh *SyncHandler) logf(format string, args ...interface{}) {
log.Printf(sh.String()+" "+format, args...)
}
var (
_ blobserver.Storage = (*SyncHandler)(nil)
_ blobserver.HandlerIniter = (*SyncHandler)(nil)
)
func init() {
blobserver.RegisterHandlerConstructor("sync", newSyncFromConfig)
}
func newSyncFromConfig(ld blobserver.Loader, conf jsonconfig.Obj) (http.Handler, error) {
var (
from = conf.RequiredString("from")
to = conf.RequiredString("to")
fullSync = conf.OptionalBool("fullSyncOnStart", false)
blockFullSync = conf.OptionalBool("blockingFullSyncOnStart", false)
idle = conf.OptionalBool("idle", false)
queueConf = conf.OptionalObject("queue")
)
if err := conf.Validate(); err != nil {
return nil, err
}
if idle {
synch, err := createIdleSyncHandler(from, to)
if err != nil {
return nil, err
}
return synch, nil
}
if len(queueConf) == 0 {
return nil, errors.New(`Missing required "queue" object`)
}
q, err := sorted.NewKeyValue(queueConf)
if err != nil {
return nil, err
}
isToIndex := false
fromBs, err := ld.GetStorage(from)
if err != nil {
return nil, err
}
toBs, err := ld.GetStorage(to)
if err != nil {
return nil, err
}
if _, ok := fromBs.(*index.Index); !ok {
if _, ok := toBs.(*index.Index); ok {
isToIndex = true
}
}
sh, err := createSyncHandler(from, to, fromBs, toBs, q, isToIndex)
if err != nil {
return nil, err
}
if fullSync || blockFullSync {
didFullSync := make(chan bool, 1)
go func() {
n := sh.runSync("queue", sh.enumerateQueuedBlobs)
sh.logf("Queue sync copied %d blobs", n)
n = sh.runSync("full", blobserverEnumerator(context.TODO(), fromBs))
sh.logf("Full sync copied %d blobs", n)
didFullSync <- true
sh.syncQueueLoop()
}()
if blockFullSync {
sh.logf("Blocking startup, waiting for full sync from %q to %q", from, to)
<-didFullSync
sh.logf("Full sync complete.")
}
} else {
go sh.syncQueueLoop()
}
blobserver.GetHub(fromBs).AddReceiveHook(sh.enqueue)
return sh, nil
}
func (sh *SyncHandler) InitHandler(hl blobserver.FindHandlerByTyper) error {
_, h, err := hl.FindHandlerByType("root")
if err == blobserver.ErrHandlerTypeNotFound {
// It's optional. We register ourselves if it's there.
return nil
}
if err != nil {
return err
}
h.(*RootHandler).registerSyncHandler(sh)
return nil
}
type timestampedError struct {
t time.Time
err error
}
func createSyncHandler(fromName, toName string,
from blobserver.Storage, to blobserver.BlobReceiver,
queue sorted.KeyValue, isToIndex bool) (*SyncHandler, error) {
h := &SyncHandler{
copierPoolSize: 3,
from: from,
to: to,
fromName: fromName,
toName: toName,
queue: queue,
toIndex: isToIndex,
blobc: make(chan blob.SizedRef, 8),
status: "not started",
blobStatus: make(map[string]fmt.Stringer),
}
return h, nil
}
func createIdleSyncHandler(fromName, toName string) (*SyncHandler, error) {
h := &SyncHandler{
fromName: fromName,
toName: toName,
idle: true,
status: "disabled",
}
return h, nil
}
func (sh *SyncHandler) discoveryMap() map[string]interface{} {
// TODO(mpl): more status info
return map[string]interface{}{
"from": sh.fromName,
"to": sh.toName,
"toIndex": sh.toIndex,
}
}
func (sh *SyncHandler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
sh.lk.Lock()
defer sh.lk.Unlock()
fmt.Fprintf(rw, "<h1>%s to %s Sync Status</h1><p><b>Current status: </b>%s</p>",
sh.fromName, sh.toName, html.EscapeString(sh.status))
if sh.idle {
return
}
fmt.Fprintf(rw, "<h2>Stats:</h2><ul>")
fmt.Fprintf(rw, "<li>Blobs copied: %d</li>", sh.totalCopies)
fmt.Fprintf(rw, "<li>Bytes copied: %d</li>", sh.totalCopyBytes)
if !sh.recentCopyTime.IsZero() {
fmt.Fprintf(rw, "<li>Most recent copy: %s</li>", sh.recentCopyTime.Format(time.RFC3339))
}
fmt.Fprintf(rw, "<li>Copy errors: %d</li>", sh.totalErrors)
fmt.Fprintf(rw, "</ul>")
if len(sh.blobStatus) > 0 {
fmt.Fprintf(rw, "<h2>Current Copies:</h2><ul>")
for blobstr, sfn := range sh.blobStatus {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
blobstr, html.EscapeString(sfn.String()))
}
fmt.Fprintf(rw, "</ul>")
}
if len(sh.recentErrors) > 0 {
fmt.Fprintf(rw, "<h2>Recent Errors:</h2><ul>")
for _, te := range sh.recentErrors {
fmt.Fprintf(rw, "<li>%s: %s</li>\n",
te.t.Format(time.RFC3339),
html.EscapeString(te.err.Error()))
}
fmt.Fprintf(rw, "</ul>")
}
}
func (sh *SyncHandler) setStatus(s string, args ...interface{}) {
s = time.Now().UTC().Format(time.RFC3339) + ": " + fmt.Sprintf(s, args...)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.status = s
}
func (sh *SyncHandler) setBlobStatus(blobref string, s fmt.Stringer) {
sh.lk.Lock()
defer sh.lk.Unlock()
if s != nil {
sh.blobStatus[blobref] = s
} else {
delete(sh.blobStatus, blobref)
}
}
func (sh *SyncHandler) addErrorToLog(err error) {
sh.logf("%v", err)
sh.lk.Lock()
defer sh.lk.Unlock()
sh.recentErrors = append(sh.recentErrors, timestampedError{time.Now().UTC(), err})
if len(sh.recentErrors) > maxErrors {
// Kinda lame, but whatever. Only for errors, rare.
copy(sh.recentErrors[:maxErrors], sh.recentErrors[1:maxErrors+1])
sh.recentErrors = sh.recentErrors[:maxErrors]
}
}
type copyResult struct {
sb blob.SizedRef
err error
}
func blobserverEnumerator(ctx *context.Context, src blobserver.BlobEnumerator) func(chan<- blob.SizedRef, <-chan struct{}) error |
func (sh *SyncHandler) enumerateQueuedBlobs(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
it := sh.queue.Find("", "")
for it.Next() {
br, ok := blob.Parse(it.Key())
size, err := strconv.ParseInt(it.Value(), 10, 64)
if !ok || err != nil {
sh.logf("ERROR: bogus sync queue entry: %q => %q", it.Key(), it.Value())
continue
}
select {
case dst <- blob.SizedRef{br, size}:
case <-intr:
return it.Close()
}
}
return it.Close()
}
func (sh *SyncHandler) enumerateBlobc(first blob.SizedRef) func(chan<- blob.SizedRef, <-chan struct{}) error {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
defer close(dst)
dst <- first
for {
select {
case sb := <-sh.blobc:
dst <- sb
default:
return nil
}
}
}
}
func (sh *SyncHandler) runSync(srcName string, enumSrc func(chan<- blob.SizedRef, <-chan struct{}) error) int {
enumch := make(chan blob.SizedRef, | {
return func(dst chan<- blob.SizedRef, intr <-chan struct{}) error {
return blobserver.EnumerateAll(ctx, src, func(sb blob.SizedRef) error {
select {
case dst <- sb:
case <-intr:
return errors.New("interrupted")
}
return nil
})
}
} | identifier_body |
feature_table.rs | QualifierValue::nom)), |(_, v)| v);
map(
tuple((parse_name, opt(parse_value))),
|(name, value)| Qualifier{ name, value }
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum QualifierValue {
QuotedText(String),
VocabularyTerm(FtString),
ReferenceNumber(u32),
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for QualifierValue{
fn nom(input: &'a str) -> IResult<&'a str, QualifierValue, E> {
let parse_quoted_text =
map(
tuple((tag("\""), take_while(|c| c != '"'), tag("\""))),
|(_, v, _): (&str, &str, &str)| QualifierValue::QuotedText(v.to_string()));
let parse_vocabulary_term =
map(
FtString::nom,
QualifierValue::VocabularyTerm);
let parse_reference_number =
map(
tuple((tag("["), u32::nom, tag("]"))),
|(_, d, _)| QualifierValue::ReferenceNumber(d));
alt((
parse_quoted_text,
parse_vocabulary_term,
parse_reference_number
))(input)
}
}
//
//
// Location data model starts here
//
// Should really be in a sub-module I guess
//
//
/// A point within a sequence, representing a specific nucleotide. Counts from 1.
#[derive(Debug, PartialEq, Eq)]
pub struct Point(u32);
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Point {
fn nom(input: &'a str) -> IResult<&'a str, Point, E> {
map(u32::nom, Point)(input)
}
}
/// A position between two bases in a sequence.
/// pub
/// For example, 122^123. The locations must be consecutive.
///
/// For example, 100^1 for a circular sequence of length 100.
#[derive(Debug, PartialEq, Eq)]
pub struct Between(u32, u32);
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Between {
fn nom(input: &'a str) -> IResult<&'a str, Between, E> {
map(
tuple((
u32::nom,
tag("^"),
u32::nom
)),
|(from, _, to)| Between(from, to)
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Position {
Point(Point),
Between(Between)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Position {
fn nom(input: &'a str) -> IResult<&'a str, Position, E> {
alt((
map(Between::nom, Position::Between),
map(Point::nom, Position::Point)
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Local {
Point(Point),
Between(Between),
Within { from: Point, to: Point },
Span { from: Position, to: Position, before_from: bool, after_to: bool },
}
impl Local {
pub fn span(from: u32, to: u32) -> Local {
Local::Span {
from: Position::Point(Point(from)),
to: Position::Point(Point(to)),
before_from: false,
after_to: false }
}
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Local {
fn nom(input: &'a str) -> IResult<&'a str, Local, E> {
let parse_within = map(
tuple((Point::nom, tag("."), Point::nom)),
|(from, _, to)| Local::Within { from, to });
let parse_span = map(
tuple((
opt(tag("<")), Position::nom, tag(".."), opt(tag(">")), Position::nom)),
|(before_from, from, _, after_to, to)| Local::Span {
from,
to,
before_from: before_from.is_some(),
after_to: after_to.is_some() }
);
alt((
map(Between::nom, Local::Between),
parse_within,
parse_span,
map(Point::nom, Local::Point), // must do this last as it's a prefix of the others
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Loc {
Remote { within: String, at: Local },
Local(Local)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Loc {
fn nom(input: &'a str) -> IResult<&'a str, Loc, E> {
let parse_accession = take_while1(|c| {
let b = c as u8;
is_alphanumeric(b) || b == b'.'
});
alt((
map(
tuple((parse_accession, tag(":"), Local::nom)),
|(within, _, at)| Loc::Remote { within: within.to_string(), at }
),
map(Local::nom, Loc::Local)
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum LocOp {
Loc(Loc),
Complement(Box<LocOp>),
Join(Vec<LocOp>),
Order(Vec<LocOp>)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for LocOp {
fn nom(input: &'a str) -> IResult<&'a str, LocOp, E> {
let parse_complement =
map(
tuple((
tag("complement("),
cut(LocOp::nom),
tag(")")
)),
|(_, loc, _)| loc
);
let parse_join =
map(
tuple((
tag("join("),
cut(separated_list(tag(","), LocOp::nom)),
tag(")")
)),
|(_, locs, _)| locs
);
let parse_order =
map(
tuple((
tag("order("),
cut(separated_list(tag(","), LocOp::nom)),
tag(")")
)),
|(_, locs, _)| locs
);
alt((
map(Loc::nom, LocOp::Loc),
map(parse_complement, |loc| LocOp::Complement(Box::new(loc))),
map(parse_join, LocOp::Join),
map(parse_order, LocOp::Order)
))(input)
}
}
#[cfg(test)]
mod tests {
use super::*;
use nom::error::{
convert_error,
VerboseError,
};
fn assert_nom_to_expected<'a, T>() -> impl Fn(&'a str, T) -> ()
where
T: Nommed<&'a str, VerboseError<&'a str>> + std::fmt::Debug + PartialEq
{
move |input: &str, expected: T| {
match T::nom(input) {
Ok((rem, ref res)) if !rem.is_empty() => panic!("Non-empty remaining input {}, parsed out {:?}", rem, res),
Ok((_, res)) => assert_eq!(res, expected, "Got result {:?} but expected {:?}", res, expected),
Err(nom::Err::Error(e)) | Err(nom::Err::Failure(e)) => panic!("Problem: {}", convert_error(input, e)),
e => panic!("Unknown error: {:?}", e)
}
}
}
// #[test]
// fn test_parse_feature_record_from_spec() {
// let expect = assert_nom_to_expected::<FeatureRecord>();
// expect(
// r#"
// source 1..1000
// /culture_collection="ATCC:11775"
// /culture_collection="CECT:515"
// "#,
// FeatureRecord {
// key: "source".to_string(),
// location: LocOp::Loc(Loc::Local(Local::span(1, 1000))),
// qualifiers: vec![]
// }
// )
// }
#[test]
fn test_parse_qualifiers_from_spec() {
let expect = assert_nom_to_expected::<Qualifier>();
expect(
"/pseudo",
Qualifier {
name: FtString("pseudo".to_string()),
value: None });
expect(
"/citation=[1]",
Qualifier {
name: FtString("citation".to_string()),
value: Some(QualifierValue::ReferenceNumber(1)) });
expect(
"/gene=\"arsC\"",
Qualifier {
name: FtString("gene".to_string()),
value: Some(QualifierValue::QuotedText("arsC".to_string()))});
expect(
"/rpt_type=DISPERSED",
Qualifier {
name: FtString("rpt_type".to_string()),
value: Some(QualifierValue::VocabularyTerm(FtString("DISPERSED".to_string())))});
}
#[test]
fn test_parse_locations_from_spec() {
let expect = assert_nom_to_expected::<LocOp>();
expect(
"467",
LocOp::Loc(Loc::Local(Local::Point(Point(467)))));
| random_line_split |
||
feature_table.rs | , FtString, E> {
let uc = Interval('A', 'Z');
let lc = Interval('a', 'z');
let di = Interval('0', '9');
let misc = "_-'*";
let ft_char = {
move |c: char|
uc.contains(&c) ||
lc.contains(&c) ||
di.contains(&c) ||
misc.contains(c)
};
let alpha = {
move |c: char|
uc.contains(&c) ||
lc.contains(&c)
};
map(
verify(
take_while_m_n(1, 20, ft_char),
move |s: &str| s.chars().any(alpha)
),
|s: &str| FtString(s.to_string())
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct Qualifier {
name: FtString,
value: Option<QualifierValue>
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Qualifier {
fn nom(input: &'a str) -> IResult<&'a str, Qualifier, E> {
let parse_name = map(tuple((tag("/"), FtString::nom)), |(_, n)| n);
let parse_value = map(tuple((tag("="), QualifierValue::nom)), |(_, v)| v);
map(
tuple((parse_name, opt(parse_value))),
|(name, value)| Qualifier{ name, value }
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum QualifierValue {
QuotedText(String),
VocabularyTerm(FtString),
ReferenceNumber(u32),
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for QualifierValue{
fn nom(input: &'a str) -> IResult<&'a str, QualifierValue, E> {
let parse_quoted_text =
map(
tuple((tag("\""), take_while(|c| c != '"'), tag("\""))),
|(_, v, _): (&str, &str, &str)| QualifierValue::QuotedText(v.to_string()));
let parse_vocabulary_term =
map(
FtString::nom,
QualifierValue::VocabularyTerm);
let parse_reference_number =
map(
tuple((tag("["), u32::nom, tag("]"))),
|(_, d, _)| QualifierValue::ReferenceNumber(d));
alt((
parse_quoted_text,
parse_vocabulary_term,
parse_reference_number
))(input)
}
}
//
//
// Location data model starts here
//
// Should really be in a sub-module I guess
//
//
/// A point within a sequence, representing a specific nucleotide. Counts from 1.
#[derive(Debug, PartialEq, Eq)]
pub struct Point(u32);
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Point {
fn nom(input: &'a str) -> IResult<&'a str, Point, E> {
map(u32::nom, Point)(input)
}
}
/// A position between two bases in a sequence.
/// pub
/// For example, 122^123. The locations must be consecutive.
///
/// For example, 100^1 for a circular sequence of length 100.
#[derive(Debug, PartialEq, Eq)]
pub struct Between(u32, u32);
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Between {
fn nom(input: &'a str) -> IResult<&'a str, Between, E> {
map(
tuple((
u32::nom,
tag("^"),
u32::nom
)),
|(from, _, to)| Between(from, to)
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Position {
Point(Point),
Between(Between)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Position {
fn nom(input: &'a str) -> IResult<&'a str, Position, E> {
alt((
map(Between::nom, Position::Between),
map(Point::nom, Position::Point)
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Local {
Point(Point),
Between(Between),
Within { from: Point, to: Point },
Span { from: Position, to: Position, before_from: bool, after_to: bool },
}
impl Local {
pub fn span(from: u32, to: u32) -> Local {
Local::Span {
from: Position::Point(Point(from)),
to: Position::Point(Point(to)),
before_from: false,
after_to: false }
}
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Local {
fn nom(input: &'a str) -> IResult<&'a str, Local, E> | ))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Loc {
Remote { within: String, at: Local },
Local(Local)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Loc {
fn nom(input: &'a str) -> IResult<&'a str, Loc, E> {
let parse_accession = take_while1(|c| {
let b = c as u8;
is_alphanumeric(b) || b == b'.'
});
alt((
map(
tuple((parse_accession, tag(":"), Local::nom)),
|(within, _, at)| Loc::Remote { within: within.to_string(), at }
),
map(Local::nom, Loc::Local)
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum LocOp {
Loc(Loc),
Complement(Box<LocOp>),
Join(Vec<LocOp>),
Order(Vec<LocOp>)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for LocOp {
fn nom(input: &'a str) -> IResult<&'a str, LocOp, E> {
let parse_complement =
map(
tuple((
tag("complement("),
cut(LocOp::nom),
tag(")")
)),
|(_, loc, _)| loc
);
let parse_join =
map(
tuple((
tag("join("),
cut(separated_list(tag(","), LocOp::nom)),
tag(")")
)),
|(_, locs, _)| locs
);
let parse_order =
map(
tuple((
tag("order("),
cut(separated_list(tag(","), LocOp::nom)),
tag(")")
)),
|(_, locs, _)| locs
);
alt((
map(Loc::nom, LocOp::Loc),
map(parse_complement, |loc| LocOp::Complement(Box::new(loc))),
map(parse_join, LocOp::Join),
map(parse_order, LocOp::Order)
))(input)
}
}
#[cfg(test)]
mod tests {
use super::*;
use nom::error::{
convert_error,
VerboseError,
};
fn assert_nom_to_expected<'a, T>() -> impl Fn(&'a str, T) -> ()
where
T: Nommed<&'a str, VerboseError<&'a str>> + std::fmt::Debug + PartialEq
{
move |input: &str, expected: T| {
match T::nom(input) {
Ok((rem, ref res)) if !rem.is_empty() => panic!("Non-empty remaining input {}, parsed out {:?}", rem, res),
Ok((_, res)) => assert_eq!(res, expected, "Got result {:?} but expected {:?}", res, expected),
Err(nom::Err::Error(e)) | Err(nom::Err::Failure(e)) => panic!("Problem: {}", convert_error(input, e)),
e => panic!("Unknown error: {:?}", e)
}
}
}
// #[test]
// fn test_parse_feature_record_from_spec() {
// let expect = assert_nom_to_expected::<FeatureRecord>();
// expect(
// r#"
// source 1..1000
// /culture_collection="ATCC:11775"
// /culture_collection="CECT:515"
// "#,
// FeatureRecord {
// key: "source".to_string(),
// | {
let parse_within = map(
tuple((Point::nom, tag("."), Point::nom)),
|(from, _, to)| Local::Within { from, to });
let parse_span = map(
tuple((
opt(tag("<")), Position::nom, tag(".."), opt(tag(">")), Position::nom)),
|(before_from, from, _, after_to, to)| Local::Span {
from,
to,
before_from: before_from.is_some(),
after_to: after_to.is_some() }
);
alt((
map(Between::nom, Local::Between),
parse_within,
parse_span,
map(Point::nom, Local::Point), // must do this last as it's a prefix of the others | identifier_body |
feature_table.rs | {
key: String,
location: LocOp,
qualifiers: Vec<Qualifier>
}
// impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for FeatureRecord {
// fn nom(input: &'a str) -> IResult<&'a str, FeatureRecord, E> {
// }
// }
/// An ID that's valid within the feature table.
///
/// This is:
/// * At least one letter
/// * Upper case, lower case letters
/// * Numbers 0..9
/// * Underscore (_)
/// * Hyphen (-)
/// * Single quote (')
/// * Asterisk (*)
/// The maximum length is 20 characters.
#[derive(Debug, PartialEq, Eq)]
pub struct FtString(String);
// litle utility for ranges.
//
// Note: couldn't use 'a'..='b' because this is an iterator, so doesn't
// implement `Copy`.
#[derive(Clone, Copy)]
struct Interval<T>(T, T);
impl <T : PartialOrd> Interval<T> {
fn contains(&self, e: &T) -> bool {
self.0 <= *e &&
*e <= self.1
}
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for FtString {
fn nom(input: &'a str) -> IResult<&'a str, FtString, E> {
let uc = Interval('A', 'Z');
let lc = Interval('a', 'z');
let di = Interval('0', '9');
let misc = "_-'*";
let ft_char = {
move |c: char|
uc.contains(&c) ||
lc.contains(&c) ||
di.contains(&c) ||
misc.contains(c)
};
let alpha = {
move |c: char|
uc.contains(&c) ||
lc.contains(&c)
};
map(
verify(
take_while_m_n(1, 20, ft_char),
move |s: &str| s.chars().any(alpha)
),
|s: &str| FtString(s.to_string())
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct Qualifier {
name: FtString,
value: Option<QualifierValue>
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Qualifier {
fn nom(input: &'a str) -> IResult<&'a str, Qualifier, E> {
let parse_name = map(tuple((tag("/"), FtString::nom)), |(_, n)| n);
let parse_value = map(tuple((tag("="), QualifierValue::nom)), |(_, v)| v);
map(
tuple((parse_name, opt(parse_value))),
|(name, value)| Qualifier{ name, value }
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum QualifierValue {
QuotedText(String),
VocabularyTerm(FtString),
ReferenceNumber(u32),
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for QualifierValue{
fn nom(input: &'a str) -> IResult<&'a str, QualifierValue, E> {
let parse_quoted_text =
map(
tuple((tag("\""), take_while(|c| c != '"'), tag("\""))),
|(_, v, _): (&str, &str, &str)| QualifierValue::QuotedText(v.to_string()));
let parse_vocabulary_term =
map(
FtString::nom,
QualifierValue::VocabularyTerm);
let parse_reference_number =
map(
tuple((tag("["), u32::nom, tag("]"))),
|(_, d, _)| QualifierValue::ReferenceNumber(d));
alt((
parse_quoted_text,
parse_vocabulary_term,
parse_reference_number
))(input)
}
}
//
//
// Location data model starts here
//
// Should really be in a sub-module I guess
//
//
/// A point within a sequence, representing a specific nucleotide. Counts from 1.
#[derive(Debug, PartialEq, Eq)]
pub struct Point(u32);
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Point {
fn nom(input: &'a str) -> IResult<&'a str, Point, E> {
map(u32::nom, Point)(input)
}
}
/// A position between two bases in a sequence.
/// pub
/// For example, 122^123. The locations must be consecutive.
///
/// For example, 100^1 for a circular sequence of length 100.
#[derive(Debug, PartialEq, Eq)]
pub struct Between(u32, u32);
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Between {
fn nom(input: &'a str) -> IResult<&'a str, Between, E> {
map(
tuple((
u32::nom,
tag("^"),
u32::nom
)),
|(from, _, to)| Between(from, to)
)(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Position {
Point(Point),
Between(Between)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Position {
fn nom(input: &'a str) -> IResult<&'a str, Position, E> {
alt((
map(Between::nom, Position::Between),
map(Point::nom, Position::Point)
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Local {
Point(Point),
Between(Between),
Within { from: Point, to: Point },
Span { from: Position, to: Position, before_from: bool, after_to: bool },
}
impl Local {
pub fn span(from: u32, to: u32) -> Local {
Local::Span {
from: Position::Point(Point(from)),
to: Position::Point(Point(to)),
before_from: false,
after_to: false }
}
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Local {
fn nom(input: &'a str) -> IResult<&'a str, Local, E> {
let parse_within = map(
tuple((Point::nom, tag("."), Point::nom)),
|(from, _, to)| Local::Within { from, to });
let parse_span = map(
tuple((
opt(tag("<")), Position::nom, tag(".."), opt(tag(">")), Position::nom)),
|(before_from, from, _, after_to, to)| Local::Span {
from,
to,
before_from: before_from.is_some(),
after_to: after_to.is_some() }
);
alt((
map(Between::nom, Local::Between),
parse_within,
parse_span,
map(Point::nom, Local::Point), // must do this last as it's a prefix of the others
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Loc {
Remote { within: String, at: Local },
Local(Local)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Loc {
fn nom(input: &'a str) -> IResult<&'a str, Loc, E> {
let parse_accession = take_while1(|c| {
let b = c as u8;
is_alphanumeric(b) || b == b'.'
});
alt((
map(
tuple((parse_accession, tag(":"), Local::nom)),
|(within, _, at)| Loc::Remote { within: within.to_string(), at }
),
map(Local::nom, Loc::Local)
))(input)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum LocOp {
Loc(Loc),
Complement(Box<LocOp>),
Join(Vec<LocOp>),
Order(Vec<LocOp>)
}
impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for LocOp {
fn nom(input: &'a str) -> IResult<&'a str, LocOp, E> {
let parse_complement =
map(
tuple((
tag("complement("),
cut(LocOp::nom),
tag(")")
)),
|(_, loc, _)| loc
);
let parse_join =
map(
tuple((
tag("join("),
cut(separated_list(tag(","), LocOp::nom)),
tag(")")
)),
|(_, locs, _)| locs
);
let parse_order =
map(
tuple((
tag("order("),
cut(separated_list(tag(","), LocOp::nom)),
tag(")")
)),
|(_, locs, _)| locs
);
alt((
map(Loc::nom, LocOp::Loc),
map(parse_complement, |loc| LocOp::Complement(Box::new(loc))),
map(parse_join, LocOp::Join),
map(parse_order, LocOp::Order)
)) | FeatureRecord | identifier_name |
|
room_list_item.go | this.SetLastMsg(fmt.Sprintf("%s: %s",
gopp.StrSuf4ui(msgo.PeerNameUi, 9, 1), msgo.LastMsgUi), msgo.Time, msgo.EventId)
this.totalCount += 1
if uictx.msgwin.item == this {
uictx.uiw.LabelMsgCount2.SetText(fmt.Sprintf("%3d", this.totalCount))
uictx.uiw.LabelMsgCount.SetText(fmt.Sprintf("%3d", this.totalCount))
}
this.unreadedCount += 1
this.ToolButton.SetText(fmt.Sprintf("%d", this.unreadedCount))
// this.floatUnreadCountLabel.SetText(fmt.Sprintf("%d", this.unreadedCount))
}
func (this *RoomListItem) UpdateMessageMimeContent(msgo *Message, msgiw *MessageItem) {
if !msgo.IsFile() {
return
}
fil := msgo.GetFileInfoLine()
gopp.NilPrint(fil, msgo.Msg)
if fil == nil {
return
}
locfname := store.GetFSC().GetFilePath(fil.Md5str)
rmturl := thscli.HttpFsUrlFor(fil.Md5str)
reloadMsgItem := func(txt string) { msgiw.Label_5.SetText(txt) }
locdir := store.GetFSC().GetDir()
if ok, _ := afero.Exists(afero.NewOsFs(), locfname); ok {
richtxt := Msg2FileText(fil, locdir)
log.Println(msgo.Msg, richtxt)
reloadMsgItem(richtxt)
} else {
richtxt := fmt.Sprintf("Loading... %s: %s", fil.Mime, humanize.Bytes(uint64(fil.Length)))
log.Println(msgo.Msg, richtxt)
reloadMsgItem(richtxt)
go func() {
time.Sleep(3 * time.Second)
ro := &grequests.RequestOptions{}
resp, err := grequests.Get(rmturl, ro)
gopp.ErrPrint(err, rmturl)
err = resp.DownloadToFile(locfname)
gopp.ErrPrint(err, rmturl)
runOnUiThread(func() { reloadMsgItem("Switching...") })
time.Sleep(3 * time.Second)
richtxt := Msg2FileText(fil, locdir)
log.Println(msgo.Msg, richtxt)
runOnUiThread(func() { reloadMsgItem(richtxt) })
}()
}
}
func (this *RoomListItem) UpdateMessageState(msgo *Message) {
for idx := len(this.msgos) - 1; idx >= 0; idx-- {
msgo_ := this.msgos[idx]
if msgo_.UserCode == msgo.UserCode {
msgo_.EventId = msgo.EventId
msgo_.Sent = msgo.Sent
break
}
}
for idx := len(this.msgitmdl) - 1; idx >= 0; idx-- {
msgitm := this.msgitmdl[idx]
if msgitm.UserCode == msgo.UserCode {
if !msgitm.Sent && msgo.Sent {
msgitm.Sent = msgo.Sent
msgitm.LabelSendState.Clear()
msgitm.LabelSendState.SetToolTip(gopp.ToStr(gopp.ToStrs(msgo.Sent, msgo.UserCode)))
}
break
}
}
}
func (this *RoomListItem) ClearAvatar(frndpk string) {
this.cticon = GetIdentIcon(frndpk)
this.ToolButton_2.SetIcon(this.cticon)
uictx.msgwin.SetIconForItem(this)
}
func (this *RoomListItem) SetAvatar(idico *qtgui.QIcon) {
this.cticon = idico
this.ToolButton_2.SetIcon(this.cticon)
uictx.msgwin.SetIconForItem(this)
}
func (this *RoomListItem) SetAvatarForId(frndpk string) {
locfname := store.GetFSC().GetFilePath(frndpk)
idico := qtgui.NewQIcon2(locfname)
this.SetAvatar(idico)
}
func (this *RoomListItem) SetAvatarForMessage(msgo *Message, frndpk string) {
fil := msgo.GetFileInfoLine()
gopp.NilPrint(fil, msgo.Msg)
if fil == nil {
return
}
locfname := store.GetFSC().GetFilePath(frndpk)
rmturl := thscli.HttpFsUrlFor(frndpk)
setFriendIcon := func(thefname string) {
icon := qtgui.NewQIcon2(thefname)
if icon != nil && !icon.IsNull() {
this.SetAvatar(icon)
} else {
log.Println("Friend icon not supported:", locfname)
}
}
if fil.Length == 0 { // clear avatar
this.ClearAvatar(frndpk)
return
}
go func() {
ro := &grequests.RequestOptions{}
resp, err := grequests.Get(rmturl, ro)
gopp.ErrPrint(err, rmturl)
err = resp.DownloadToFile(locfname)
gopp.ErrPrint(err, rmturl)
runOnUiThread(func() { setFriendIcon(locfname) })
}()
}
func (this *RoomListItem) FindMessageByUserCode(userCode int64) *Message {
for idx := len(this.msgos) - 1; idx >= 0; idx-- {
msgo_ := this.msgos[idx]
if msgo_.UserCode == userCode {
return msgo_
}
}
return nil
}
func (this *RoomListItem) FindMessageViewByEventId(eventId int64) *MessageItem {
for idx := len(this.msgos) - 1; idx >= 0; idx-- {
msgo_ := this.msgos[idx]
if msgo_.EventId == eventId {
return this.msgitmdl[idx]
}
}
return nil
}
// TODO: determine whether to omit the user name display when it matches the previous message
func (this *RoomListItem) AddMessageHiddenCloseSameUser(prev bool) {
// prev is true, compare [0], [1]
// prev is false, compare [len-2], [len-1]
if len(this.msgos) < 2 {
return
}
var m0, m1 *Message
if prev {
m0 = this.msgos[0]
m1 = this.msgos[1]
} else {
m0 = this.msgos[len(this.msgos)-2]
m1 = this.msgos[len(this.msgos)-1]
}
if m0.PeerNameUi == m1.PeerNameUi {
// can not get Ui_MessageItemView
}
}
func (this *RoomListItem) GetName() string {
return gopp.IfElseStr(this.isgroup, this.grpInfo.GetTitle(), this.frndInfo.GetName())
}
func (this *RoomListItem) GetId() string {
if this.isgroup {
// log.Println(this.grpInfo.GetGroupId(), this.grpInfo.Title)
}
return gopp.IfElseStr(this.isgroup, this.grpInfo.GetGroupId(), this.frndInfo.GetPubkey())
}
func (this *RoomListItem) GetNum() uint32 {
return uint32(gopp.IfElseInt(this.isgroup, int(this.grpInfo.GetGnum()), int(this.frndInfo.GetFnum())))
}
func (this *RoomListItem) UpdateName(name string) {
if this.isgroup {
if this.grpInfo.Title != name {
this.grpInfo.Title = name
this.SetContactInfo(this.grpInfo)
// this.Label_2.SetText(gopp.StrSuf4ui(name, 26))
// this.Label_2.SetToolTip(name)
// this.ToolButton_2.SetToolTip(name + "." + this.GetId()[:7])
}
} else {
if this.frndInfo.Name != name {
this.frndInfo.Name = name
this.SetContactInfo(this.frndInfo)
// this.Label_2.SetText(gopp.StrSuf4ui(name, 26))
// this.Label_2.SetToolTip(name)
// this.ToolButton_2.SetToolTip(name + "." + this.GetId()[:7])
}
}
}
func (this *RoomListItem) UpdateStatusMessage(statusText string) {
if !this.isgroup {
if this.frndInfo.Stmsg != statusText {
this.frndInfo.Stmsg = statusText
this.SetContactInfo(this.frndInfo)
}
} else {
}
}
func (this *RoomListItem) UpdateReaded() {
mw := uictx.mw
curval := mw.ScrollArea_2.VerticalScrollBar().Value()
minval := mw.ScrollArea_2.VerticalScrollBar().Minimum()
maxval := mw.ScrollArea_2.VerticalScrollBar().Maximum()
log.Println(this.unreadedCount, this.pressed, curval, minval, maxval)
if this.unreadedCount > 0 && this.pressed {
if curval == maxval || maxval == -1 {
this.unreadedCount = 0
this.ToolButton.SetText("")
}
}
}
// TODO: make this configurable
func init() {
if runtime.GOOS == "android" {
secondsEastOfUTC := int((8 * time.Hour | ).Seconds())
cqzone := time.FixedZone("Chongqing", secondsEastOfUTC)
time.Local = cqzone
}
}
// Two kinds of time: server time and client time
func (this *RoomListItem) SetLastMsg(ms | identifier_body |
|
room_list_item.go | qtwidgets.DeleteQMenu(item.menu)
}
item.menu = nil
if item.cticon != nil {
qtgui.DeleteQIcon(item.cticon)
}
item.cticon = nil
if item.sticon != nil {
qtgui.DeleteQIcon(item.sticon)
}
item.sticon = nil
item.OnConextMenu = nil
item.subws = nil
item.msgitmdl = nil
item.msgos = nil
}
}
/////////////////
type RoomListItem struct {
*Ui_ContactItemView
OnConextMenu func(w *qtwidgets.QWidget, pos *qtcore.QPoint)
cticon *qtgui.QIcon
sticon *qtgui.QIcon
subws []qtwidgets.QWidget_ITF
menu *qtwidgets.QMenu
floatUnreadCountLabel *qtwidgets.QLabel
msgitmdl []*MessageItem
msgos []*Message
pressed bool
hovered bool
isgroup bool
frndInfo *thspbs.FriendInfo
grpInfo *thspbs.GroupInfo
unreadedCount int
totalCount int
timeline thscli.TimeLine
WaitSyncStoreTimeLineCount int
LastMsgEventId int64
}
func NewRoomListItem() *RoomListItem {
this := &RoomListItem{}
this.Ui_ContactItemView = NewUi_ContactItemView2()
this.initUis()
this.initEvents()
return this
}
func NewRoomListItem2(info *thspbs.FriendInfo) *RoomListItem {
this := &RoomListItem{}
this.Ui_ContactItemView = NewUi_ContactItemView2()
this.initUis()
this.SetContactInfo(info)
this.initEvents()
return this
}
func NewRoomListItem3(info *thspbs.GroupInfo) *RoomListItem {
this := &RoomListItem{}
this.Ui_ContactItemView = NewUi_ContactItemView2()
this.initUis()
this.SetContactInfo(info)
this.initEvents()
return this
}
func (this *RoomListItem) initUis() {
if !gopp.IsAndroid() {
this.ToolButton.SetIconSize(qtcore.NewQSize1(12, 12))
}
this.floatUnreadCountLabel = this.floatTextOverWidget(this.ToolButton)
// this.Ui_ContactItemView.ContactItemView.SetMinimumHeight(20 * 2)
}
func (this *RoomListItem) initEvents() {
labs := []*qtwidgets.QLabel{this.Label_2, this.Label_3, this.Label_4, this.LabelLastMsgTime}
for _, lab := range labs {
lab.Clear()
lab.SetAttribute(qtcore.Qt__WA_TranslucentBackground, false)
lab.SetMouseTracking(true)
this.subws = append(this.subws, lab)
}
this.ToolButton.SetText("")
this.ToolButton.SetMouseTracking(true) |
onMousePress := func(event *qtgui.QMouseEvent) {
uictx.gtreco.onMousePress(this, event)
// log.Println(event)
if event.Button() == qtcore.Qt__LeftButton {
for _, room := range uictx.ctitmdl {
if room != this {
room.SetPressState(false)
}
}
this.SetPressState(true)
}
}
onMouseRelease := func(event *qtgui.QMouseEvent) {
// log.Println(event)
uictx.gtreco.onMouseRelease(this, event)
}
onMouseMove := func(event *qtgui.QMouseEvent) {
uictx.gtreco.onMouseMove(this, event)
if true {
return
}
// log.Println(event)
if !this.hovered {
this.hovered = true
for _, room := range uictx.ctitmdl {
if room != this {
room.OnHover(false)
}
}
if !this.pressed {
this.OnHover(true)
}
}
}
_ = onMouseMove
onMouseLeave := func(event *qtcore.QEvent) {
this.OnHover(false)
}
onMouseEnter := func(event *qtcore.QEvent) {
this.OnHover(true)
}
w.InheritMousePressEvent(onMousePress)
w.InheritMouseReleaseEvent(onMouseRelease)
// w.InheritMouseMoveEvent(onMouseMove)
w.InheritLeaveEvent(onMouseLeave)
w.InheritEnterEvent(onMouseEnter)
for _, lab := range labs {
lab.InheritMousePressEvent(onMousePress)
lab.InheritMouseReleaseEvent(onMouseRelease)
// lab.InheritMouseMoveEvent(onMouseMove)
}
w.InheritContextMenuEvent(func(event *qtgui.QContextMenuEvent) {
gpos := event.GlobalPos()
log.Println(event.Type(), gpos.X(), gpos.Y())
if this.OnConextMenu != nil {
this.OnConextMenu(w, gpos)
}
})
}
func (this *RoomListItem) OnContextMenu2(gpos *qtcore.QPoint) {
w := this.ContactItemView
if this.OnConextMenu != nil {
this.OnConextMenu(w, gpos)
}
}
func (this *RoomListItem) SetContactInfo(info interface{}) {
trtxt := gopp.StrSuf4ui
switch ct := info.(type) {
case *thspbs.FriendInfo:
this.frndInfo = ct
name := gopp.IfElseStr(ct.GetName() == "", ct.GetPubkey()[:7], ct.GetName())
nametip := gopp.IfElseStr(ct.GetName() == "", ct.GetPubkey()[:17], ct.GetName())
this.Label_2.SetText(trtxt(name, 26))
this.Label_2.SetToolTip(nametip)
this.Label_4.SetText(trtxt(ct.GetStmsg(), 36))
this.Label_4.SetToolTip(ct.GetStmsg())
SetQLabelElideText(this.Label_2, name, "")
SetQLabelElideText(this.Label_4, ct.Stmsg, "")
avataricon := store.GetFSC().GetFilePath(ct.GetPubkey())
if gopp.FileExist(avataricon) {
this.cticon = qtgui.NewQIcon2(avataricon)
this.ToolButton_2.SetIcon(this.cticon)
} else {
this.cticon = GetIdentIcon(ct.Pubkey)
this.ToolButton_2.SetIcon(this.cticon)
}
if ct.GetConnStatus() == 0 {
this.sticon = qtgui.NewQIcon2(":/icons/offline_30.png")
this.ToolButton.SetIcon(this.sticon)
}
if ct.Fnum == thscom.FileHelperFnum {
this.cticon = qtgui.NewQIcon2(":/icons/filehelper.png")
this.ToolButton_2.SetIcon(this.cticon)
}
case *thspbs.GroupInfo:
this.grpInfo = ct
this.isgroup = true
this.Label_4.SetHidden(true)
this.Label_2.SetText(trtxt(ct.GetTitle(), 26))
this.Label_2.SetToolTip(ct.GetTitle())
suff := fmt.Sprintf(" %s%d", NameNumSep(), len(ct.Members))
SetQLabelElideText(this.Label_2, ct.Title, suff)
// this may be called multiple times; repeatedly subtracting 20 would eventually shrink the item to 0 height.
// this.QWidget_PTR().SetFixedHeight(this.QWidget_PTR().Height() - 20)
if false {
this.cticon = qtgui.NewQIcon2(":/icons/groupgray.png")
} else {
this.cticon = GetInitAvatar(gopp.IfElseStr(ct.Title == "", ct.GroupId, ct.Title))
}
this.ToolButton_2.SetIcon(this.cticon)
this.setConnStatus(int32(thscli.CONN_STATUS_UDP))
default:
log.Fatalln("wtf")
}
this.ToolButton_2.SetToolTip(this.GetName() + "." + gopp.SubStr(this.GetId(), 7))
}
func (this *RoomListItem) AddMessage(msgo *Message, prev bool) {
// check in list
for _, msgoe := range this.msgos {
if msgoe.EventId == msgo.EventId && msgo.EventId != 0 {
log.Printf("msg already in list: %d, %+v\n", msgo.EventId, msgo)
return
}
}
if prev {
this.msgos = append([]*Message{msgo}, this.msgos...)
msgiw := NewMessageItem()
msgiw.Sent = msgo.Sent
msgiw.UserCode = msgo.UserCode
this.msgitmdl = append([]*MessageItem{msgiw}, this.msgitmdl...)
this.AddMessageImpl(msgo, msgiw, prev)
this.UpdateMessageMimeContent(msgo, msgiw)
} else {
this.msgos = append(this.msgos, msgo)
msgiw := NewMessageItem()
msgiw.Sent = msgo.Sent
msgiw.UserCode = msgo.UserCode
this.msgitmdl = append(this.msgitmdl, msgiw)
this.AddMessageImpl(msgo, msgiw, prev)
this.UpdateMessageMimeContent(msgo, msgiw)
// test and update storage's sync info
if msgo.EventId >= this.timeline | this.ToolButton_2.SetMouseTracking(true)
w := this.ContactItemView
w.SetMouseTracking(true) | random_line_split |
room_list_item.go | ui
switch ct := info.(type) {
case *thspbs.FriendInfo:
this.frndInfo = ct
name := gopp.IfElseStr(ct.GetName() == "", ct.GetPubkey()[:7], ct.GetName())
nametip := gopp.IfElseStr(ct.GetName() == "", ct.GetPubkey()[:17], ct.GetName())
this.Label_2.SetText(trtxt(name, 26))
this.Label_2.SetToolTip(nametip)
this.Label_4.SetText(trtxt(ct.GetStmsg(), 36))
this.Label_4.SetToolTip(ct.GetStmsg())
SetQLabelElideText(this.Label_2, name, "")
SetQLabelElideText(this.Label_4, ct.Stmsg, "")
avataricon := store.GetFSC().GetFilePath(ct.GetPubkey())
if gopp.FileExist(avataricon) {
this.cticon = qtgui.NewQIcon2(avataricon)
this.ToolButton_2.SetIcon(this.cticon)
} else {
this.cticon = GetIdentIcon(ct.Pubkey)
this.ToolButton_2.SetIcon(this.cticon)
}
if ct.GetConnStatus() == 0 {
this.sticon = qtgui.NewQIcon2(":/icons/offline_30.png")
this.ToolButton.SetIcon(this.sticon)
}
if ct.Fnum == thscom.FileHelperFnum {
this.cticon = qtgui.NewQIcon2(":/icons/filehelper.png")
this.ToolButton_2.SetIcon(this.cticon)
}
case *thspbs.GroupInfo:
this.grpInfo = ct
this.isgroup = true
this.Label_4.SetHidden(true)
this.Label_2.SetText(trtxt(ct.GetTitle(), 26))
this.Label_2.SetToolTip(ct.GetTitle())
suff := fmt.Sprintf(" %s%d", NameNumSep(), len(ct.Members))
SetQLabelElideText(this.Label_2, ct.Title, suff)
// this may be called multiple times; repeatedly subtracting 20 would eventually shrink the item to 0 height.
// this.QWidget_PTR().SetFixedHeight(this.QWidget_PTR().Height() - 20)
if false {
this.cticon = qtgui.NewQIcon2(":/icons/groupgray.png")
} else {
this.cticon = GetInitAvatar(gopp.IfElseStr(ct.Title == "", ct.GroupId, ct.Title))
}
this.ToolButton_2.SetIcon(this.cticon)
this.setConnStatus(int32(thscli.CONN_STATUS_UDP))
default:
log.Fatalln("wtf")
}
this.ToolButton_2.SetToolTip(this.GetName() + "." + gopp.SubStr(this.GetId(), 7))
}
func (this *RoomListItem) AddMessage(msgo *Message, prev bool) {
// check in list
for _, msgoe := range this.msgos {
if msgoe.EventId == msgo.EventId && msgo.EventId != 0 {
log.Printf("msg already in list: %d, %+v\n", msgo.EventId, msgo)
return
}
}
if prev {
this.msgos = append([]*Message{msgo}, this.msgos...)
msgiw := NewMessageItem()
msgiw.Sent = msgo.Sent
msgiw.UserCode = msgo.UserCode
this.msgitmdl = append([]*MessageItem{msgiw}, this.msgitmdl...)
this.AddMessageImpl(msgo, msgiw, prev)
this.UpdateMessageMimeContent(msgo, msgiw)
} else {
this.msgos = append(this.msgos, msgo)
msgiw := NewMessageItem()
msgiw.Sent = msgo.Sent
msgiw.UserCode = msgo.UserCode
this.msgitmdl = append(this.msgitmdl, msgiw)
this.AddMessageImpl(msgo, msgiw, prev)
this.UpdateMessageMimeContent(msgo, msgiw)
// test and update storage's sync info
if msgo.EventId >= this.timeline.NextBatch {
this.timeline.NextBatch = msgo.EventId + 1
this.WaitSyncStoreTimeLineCount += 1
if this.WaitSyncStoreTimeLineCount >= 1 /*common.PullPageSize*/ {
this.WaitSyncStoreTimeLineCount = 0
go hisfet.RefreshPrevStorageTimeLine(&this.timeline, this.GetId(), this.GetName())
}
}
}
}
func (this *RoomListItem) AddMessageImpl(msgo *Message, msgiw *MessageItem, prev bool) {
showMeIcon := msgo.Me // whether to show our own icon, based on whether this message is from self
showName := true
showPeerIcon := !showMeIcon
msgiw.Label_5.SetText(msgo.MsgUi)
msgiw.LabelUserName4MessageItem.SetText(fmt.Sprintf("%s", msgo.PeerNameUi))
msgiw.LabelMsgTime.SetText(Time2Today(msgo.Time))
msgiw.LabelMsgTime.SetToolTip(gopp.TimeToFmt1(msgo.Time))
msgiw.ToolButton_3.SetVisible(showMeIcon)
msgiw.ToolButton_2.SetVisible(showPeerIcon)
msgiw.LabelUserName4MessageItem.SetVisible(showName)
msgiw.ToolButton.SetVisible(false)
if msgo.Me && !msgo.Sent {
msgiw.LabelSendState.SetPixmap(qtgui.NewQPixmap3p(":/icons/[email protected]"))
}
if msgo.Me {
msgiw.LabelSendState.SetToolTip(gopp.ToStr(gopp.ToStrs(msgo.Sent, msgo.UserCode)))
} else /*!msgo.Me*/ {
// msgiw.LabelSendState.SetVisible(false)
}
if uictx.msgwin.item == this {
vlo3 := uictx.uiw.VerticalLayout_3
if prev {
vlo3.InsertWidgetp(0, msgiw.QWidget_PTR())
} else {
vlo3.Layout().AddWidget(msgiw.QWidget_PTR())
}
}
this.SetLastMsg(fmt.Sprintf("%s: %s",
gopp.StrSuf4ui(msgo.PeerNameUi, 9, 1), msgo.LastMsgUi), msgo.Time, msgo.EventId)
this.totalCount += 1
if uictx.msgwin.item == this {
uictx.uiw.LabelMsgCount2.SetText(fmt.Sprintf("%3d", this.totalCount))
uictx.uiw.LabelMsgCount.SetText(fmt.Sprintf("%3d", this.totalCount))
}
this.unreadedCount += 1
this.ToolButton.SetText(fmt.Sprintf("%d", this.unreadedCount))
// this.floatUnreadCountLabel.SetText(fmt.Sprintf("%d", this.unreadedCount))
}
func (this *RoomListItem) UpdateMessageMimeContent(msgo *Message, msgiw *MessageItem) {
if !msgo.IsFile() {
return
}
fil := msgo.GetFileInfoLine()
gopp.NilPrint(fil, msgo.Msg)
if fil == nil {
return
}
locfname := store.GetFSC().GetFilePath(fil.Md5str)
rmturl := thscli.HttpFsUrlFor(fil.Md5str)
reloadMsgItem := func(txt string) { msgiw.Label_5.SetText(txt) }
locdir := store.GetFSC().GetDir()
if ok, _ := afero.Exists(afero.NewOsFs(), locfname); ok {
richtxt := Msg2FileText(fil, locdir)
log.Println(msgo.Msg, richtxt)
reloadMsgItem(richtxt)
} else {
richtxt := fmt.Sprintf("Loading... %s: %s", fil.Mime, humanize.Bytes(uint64(fil.Length)))
log.Println(msgo.Msg, richtxt)
reloadMsgItem(richtxt)
go func() {
time.Sleep(3 * time.Second)
ro := &grequests.RequestOptions{}
resp, err := grequests.Get(rmturl, ro)
gopp.ErrPrint(err, rmturl)
err = resp.DownloadToFile(locfname)
gopp.ErrPrint(err, rmturl)
runOnUiThread(func() { reloadMsgItem("Switching...") })
time.Sleep(3 * time.Second)
richtxt := Msg2FileText(fil, locdir)
log.Println(msgo.Msg, richtxt)
runOnUiThread(func() { reloadMsgItem(richtxt) })
}()
}
}
func (this *RoomListItem) UpdateMessageState(msgo *Message) {
for idx := len(this.msgos) - 1; idx >= 0; idx-- {
msgo_ := this.msgos[idx]
if msgo_.UserCode == msgo.UserCode {
msgo_.EventId = msgo.EventId
msgo_.Sent = msgo.Sent
break
}
}
for idx := len(this.msgitmdl) - 1; idx >= 0; idx-- {
msgitm := this.msgitmdl[idx]
if msgitm.UserCode == msgo.UserCode {
if !msgitm.Sent && msgo.Sent {
msgitm.Sent = msgo.Sent
msgitm.LabelSendState.Clear()
msgitm.LabelSendState.SetToolTip(gopp.ToStr(gopp.ToStrs(msgo.Sent, msgo.UserCode)))
}
break
}
}
}
func (this *RoomListItem) ClearAvatar(frndpk string) {
this.c | ticon = Get | identifier_name |
|
room_list_item.go | TimeLine(&this.timeline, this.GetId(), this.GetName())
}
}
}
}
func (this *RoomListItem) AddMessageImpl(msgo *Message, msgiw *MessageItem, prev bool) {
showMeIcon := msgo.Me // whether to show our own icon, based on whether this message is from self
showName := true
showPeerIcon := !showMeIcon
msgiw.Label_5.SetText(msgo.MsgUi)
msgiw.LabelUserName4MessageItem.SetText(fmt.Sprintf("%s", msgo.PeerNameUi))
msgiw.LabelMsgTime.SetText(Time2Today(msgo.Time))
msgiw.LabelMsgTime.SetToolTip(gopp.TimeToFmt1(msgo.Time))
msgiw.ToolButton_3.SetVisible(showMeIcon)
msgiw.ToolButton_2.SetVisible(showPeerIcon)
msgiw.LabelUserName4MessageItem.SetVisible(showName)
msgiw.ToolButton.SetVisible(false)
if msgo.Me && !msgo.Sent {
msgiw.LabelSendState.SetPixmap(qtgui.NewQPixmap3p(":/icons/[email protected]"))
}
if msgo.Me {
msgiw.LabelSendState.SetToolTip(gopp.ToStr(gopp.ToStrs(msgo.Sent, msgo.UserCode)))
} else /*!msgo.Me*/ {
// msgiw.LabelSendState.SetVisible(false)
}
if uictx.msgwin.item == this {
vlo3 := uictx.uiw.VerticalLayout_3
if prev {
vlo3.InsertWidgetp(0, msgiw.QWidget_PTR())
} else {
vlo3.Layout().AddWidget(msgiw.QWidget_PTR())
}
}
this.SetLastMsg(fmt.Sprintf("%s: %s",
gopp.StrSuf4ui(msgo.PeerNameUi, 9, 1), msgo.LastMsgUi), msgo.Time, msgo.EventId)
this.totalCount += 1
if uictx.msgwin.item == this {
uictx.uiw.LabelMsgCount2.SetText(fmt.Sprintf("%3d", this.totalCount))
uictx.uiw.LabelMsgCount.SetText(fmt.Sprintf("%3d", this.totalCount))
}
this.unreadedCount += 1
this.ToolButton.SetText(fmt.Sprintf("%d", this.unreadedCount))
// this.floatUnreadCountLabel.SetText(fmt.Sprintf("%d", this.unreadedCount))
}
func (this *RoomListItem) UpdateMessageMimeContent(msgo *Message, msgiw *MessageItem) {
if !msgo.IsFile() {
return
}
fil := msgo.GetFileInfoLine()
gopp.NilPrint(fil, msgo.Msg)
if fil == nil {
return
}
locfname := store.GetFSC().GetFilePath(fil.Md5str)
rmturl := thscli.HttpFsUrlFor(fil.Md5str)
reloadMsgItem := func(txt string) { msgiw.Label_5.SetText(txt) }
locdir := store.GetFSC().GetDir()
if ok, _ := afero.Exists(afero.NewOsFs(), locfname); ok {
richtxt := Msg2FileText(fil, locdir)
log.Println(msgo.Msg, richtxt)
reloadMsgItem(richtxt)
} else {
richtxt := fmt.Sprintf("Loading... %s: %s", fil.Mime, humanize.Bytes(uint64(fil.Length)))
log.Println(msgo.Msg, richtxt)
reloadMsgItem(richtxt)
go func() {
time.Sleep(3 * time.Second)
ro := &grequests.RequestOptions{}
resp, err := grequests.Get(rmturl, ro)
gopp.ErrPrint(err, rmturl)
err = resp.DownloadToFile(locfname)
gopp.ErrPrint(err, rmturl)
runOnUiThread(func() { reloadMsgItem("Switching...") })
time.Sleep(3 * time.Second)
richtxt := Msg2FileText(fil, locdir)
log.Println(msgo.Msg, richtxt)
runOnUiThread(func() { reloadMsgItem(richtxt) })
}()
}
}
func (this *RoomListItem) UpdateMessageState(msgo *Message) {
for idx := len(this.msgos) - 1; idx >= 0; idx-- {
msgo_ := this.msgos[idx]
if msgo_.UserCode == msgo.UserCode {
msgo_.EventId = msgo.EventId
msgo_.Sent = msgo.Sent
break
}
}
for idx := len(this.msgitmdl) - 1; idx >= 0; idx-- {
msgitm := this.msgitmdl[idx]
if msgitm.UserCode == msgo.UserCode {
if !msgitm.Sent && msgo.Sent {
msgitm.Sent = msgo.Sent
msgitm.LabelSendState.Clear()
msgitm.LabelSendState.SetToolTip(gopp.ToStr(gopp.ToStrs(msgo.Sent, msgo.UserCode)))
}
break
}
}
}
func (this *RoomListItem) ClearAvatar(frndpk string) {
this.cticon = GetIdentIcon(frndpk)
this.ToolButton_2.SetIcon(this.cticon)
uictx.msgwin.SetIconForItem(this)
}
func (this *RoomListItem) SetAvatar(idico *qtgui.QIcon) {
this.cticon = idico
this.ToolButton_2.SetIcon(this.cticon)
uictx.msgwin.SetIconForItem(this)
}
func (this *RoomListItem) SetAvatarForId(frndpk string) {
locfname := store.GetFSC().GetFilePath(frndpk)
idico := qtgui.NewQIcon2(locfname)
this.SetAvatar(idico)
}
func (this *RoomListItem) SetAvatarForMessage(msgo *Message, frndpk string) {
fil := msgo.GetFileInfoLine()
gopp.NilPrint(fil, msgo.Msg)
if fil == nil {
return
}
locfname := store.GetFSC().GetFilePath(frndpk)
rmturl := thscli.HttpFsUrlFor(frndpk)
setFriendIcon := func(thefname string) {
icon := qtgui.NewQIcon2(thefname)
if icon != nil && !icon.IsNull() {
this.SetAvatar(icon)
} else {
log.Println("Friend icon not supported:", locfname)
}
}
if fil.Length == 0 { // clear avatar
this.ClearAvatar(frndpk)
return
}
go func() {
ro := &grequests.RequestOptions{}
resp, err := grequests.Get(rmturl, ro)
gopp.ErrPrint(err, rmturl)
err = resp.DownloadToFile(locfname)
gopp.ErrPrint(err, rmturl)
runOnUiThread(func() { setFriendIcon(locfname) })
}()
}
func (this *RoomListItem) FindMessageByUserCode(userCode int64) *Message {
for idx := len(this.msgos) - 1; idx >= 0; idx-- {
msgo_ := this.msgos[idx]
if msgo_.UserCode == userCode {
return msgo_
}
}
return nil
}
func (this *RoomListItem) FindMessageViewByEventId(eventId int64) *MessageItem {
for idx := len(this.msgos) - 1; idx >= 0; idx-- {
msgo_ := this.msgos[idx]
if msgo_.EventId == eventId {
return this.msgitmdl[idx]
}
}
return nil
}
// TODO: determine whether to omit the user name display when it matches the previous message
func (this *RoomListItem) AddMessageHiddenCloseSameUser(prev bool) {
// prev is true, compare [0], [1]
// prev is false, compare [len-2], [len-1]
if len(this.msgos) < 2 {
return
}
var m0, m1 *Message
if prev {
m0 = this.msgos[0]
m1 = this.msgos[1]
} else {
m0 = this.msgos[len(this.msgos)-2]
m1 = this.msgos[len(this.msgos)-1]
}
if m0.PeerNameUi == m1.PeerNameUi {
// can not get Ui_MessageItemView
}
}
func (this *RoomListItem) GetName() string {
return gopp.IfElseStr(this.isgroup, this.grpInfo.GetTitle(), this.frndInfo.GetName())
}
func (this *RoomListItem) GetId() string {
if this.isgroup {
// log.Println(this.grpInfo.GetGroupId(), this.grpInfo.Title)
}
return gopp.IfElseStr(this.isgroup, this.grpInfo.GetGroupId(), this.frndInfo.GetPubkey())
}
func (this *RoomListItem) GetNum() uint32 {
return uint32(gopp.IfElseInt(this.isgroup, int(this.grpInfo.GetGnum()), int(this.frndInfo.GetFnum())))
}
func (this *RoomListItem) UpdateName(name string) {
if this.isgroup {
if this.grpInfo.Title != name {
this.grpInfo.Title = name
this.SetContactInfo(this.grpInfo)
// | this.Label_2.SetText(gopp.StrSuf4ui(name, 26))
// this.Label_2.SetToolTip(name)
// this.ToolButton_2.SetToolTip(name + "." + this.GetId()[:7])
}
} else {
if this.frndInfo.Name != name {
this.frndInfo.Name = name
| conditional_block |
|
bibtexParser.ts | (this);
}
}
type Node =
| RootNode
| TextNode
| BlockNode
| EntryNode
| CommentNode
| PreambleNode
| StringNode
| FieldNode
| ConcatNode
| LiteralNode
| BracedNode
| QuotedNode;
export function generateAST(input: string): RootNode {
const rootNode = new RootNode();
let node: Node = rootNode;
let line = 1;
let column = 0;
for (let i = 0; i < input.length; i++) {
const char = input[i]!;
const prev = input[i - 1]!;
if (char === '\n') {
line++;
column = 0;
}
column++;
switch (node.type) {
case 'root': {
node = char === '@' ? new BlockNode(node) : new TextNode(node, char);
break;
}
case 'text': {
// Whitespace or closing curly brace should precede an entry. This might
// not be correct but allows parsing of "valid" bibtex files in the
// wild.
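// e.g. in the input `...}\n@article{key,` the `@` follows a closing brace
// and a newline, so it starts a new block (illustrative input, assumed
// rather than taken from this file's tests).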
if (char === '@' && /[\s\r\n}]/.test(prev)) {
node = new BlockNode(node.parent);
} else {
node.text += char;
}
break;
}
case 'block': {
if (char === '@') {
// everything prior to this was a comment
const prevNode =
node.parent.children[node.parent.children.length - 2];
if (prevNode?.type === 'text') {
prevNode.text += '@' + node.command;
} else {
// insert text node 1 from the end
node.parent.children.pop();
new TextNode(node.parent, '@' + node.command);
node.parent.children.push(node);
}
node.command = '';
} else if (char === '{' || char === '(') {
const commandTrimmed = node.command.trim();
if (commandTrimmed === '' || /\s/.test(commandTrimmed)) {
// A block without a command is invalid. It's sometimes used in comments though, e.g. @(#)
// replace the block node
node.parent.children.pop();
node = new TextNode(node.parent, '@' + node.command + char);
} else {
node.command = commandTrimmed;
const command: string = node.command.toLowerCase();
const [braces, parens] = char === '{' ? [1, 0] : [0, 1];
const raw = '@' + command + char;
switch (command) {
case 'string':
node = new StringNode(node, raw, braces, parens);
break;
case 'preamble':
node = new PreambleNode(node, raw, braces, parens);
break;
case 'comment':
node = new CommentNode(node, raw, braces, parens);
break;
default:
node = new EntryNode(node, char);
break;
}
}
} else if (char.match(/[=#,})[\]]/)) {
// replace the block node
node.parent.children.pop();
node = new TextNode(node.parent, '@' + node.command + char);
} else {
node.command += char;
}
break;
}
case 'comment':
case 'string':
case 'preamble':
if (char === '{') {
node.braces++;
} else if (char === '}') {
node.braces--;
} else if (char === '(') {
node.parens++;
} else if (char === ')') {
node.parens--;
}
node.raw += char;
if (node.braces === 0 && node.parens === 0) {
node = node.parent.parent; // root
}
break;
case 'entry': {
if (isWhitespace(char)) {
if (!node.key) {
// Before key, ignore
} else {
// Ensure subsequent characters are not appended to the key
node.keyEnded = true;
}
} else if (char === ',') {
node = new FieldNode(node);
} else if (
(node.wrapType === '{' && char === '}') ||
(node.wrapType === '(' && char === ')')
) {
node = node.parent.parent; // root
} else if (char === '=' && node.key && isValidFieldName(node.key)) {
// Entry has no key, this is a field name
const field: FieldNode = new FieldNode(node, node.key);
node.fields.push(field);
node.key = undefined;
node = field.value;
} else if (node.keyEnded) {
throw new BibTeXSyntaxError(
input,
node,
i,
line,
column,
`The entry key cannot contain whitespace`
);
} else if (!isValidKeyCharacter(char)) {
throw new BibTeXSyntaxError(
input,
node,
i,
line,
column,
`The entry key cannot contain the character (${char})`
);
} else {
node.key = (node.key ?? '') + char;
}
break;
}
case 'field': {
if (char === '}' || char === ')') {
node.name = node.name.trim();
node = node.parent.parent.parent; // root
} else if (char === '=') {
node.name = node.name.trim();
node = node.value;
} else if (char === ',') {
node.name = node.name.trim();
node = new FieldNode(node.parent);
} else if (!isValidFieldName(char)) {
throw new BibTeXSyntaxError(input, node, i, line, column);
} else if (!node.name) {
if (!isWhitespace(char)) {
node.parent.fields.push(node);
node.name = char;
} else {
// noop
}
} else {
node.name += char;
}
break;
}
case 'concat': {
if (isWhitespace(char)) {
break; // noop
} else if (node.canConsumeValue) {
if (/[#=,}()[\]]/.test(char)) {
throw new BibTeXSyntaxError(input, node, i, line, column);
} else {
node.canConsumeValue = false;
if (char === '{') {
node = new BracedNode(node);
} else if (char === '"') {
node = new QuotedNode(node);
} else {
node = new LiteralNode(node, char);
}
}
} else {
if (char === ',') {
node = new FieldNode(node.parent.parent);
} else if (char === '}' || char === ')') {
node = node.parent.parent.parent.parent; // root
} else if (char === '#') {
node.canConsumeValue = true;
} else {
throw new BibTeXSyntaxError(input, node, i, line, column);
}
}
break;
}
case 'literal':
if (isWhitespace(char)) {
// end of literal
node = node.parent;
} else if (char === ',') {
node = new FieldNode(node.parent.parent.parent);
} else if (char === '}') {
node = node.parent.parent.parent.parent.parent; // root
} else if (char === '#') {
node = node.parent;
node.canConsumeValue = true;
} else {
node.value += char;
}
break;
// Values may be enclosed in curly braces. Curly braces may be used within
// the value but they must be balanced.
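// e.g. `author = {Maria {de la} Cruz}`: the inner braces are balanced
// (illustrative value, not taken from this file's tests).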
case 'braced':
if (char === '}' && node.depth === 0) {
node = node.parent; // values
break;
} else if (char === '{') {
node.depth++;
} else if (char === '}') {
node.depth--;
}
node.value += char;
break;
// Values may be enclosed in double quotes. Curly braces may be used
// within quoted values but they must be balanced.
//
// To escape a double quote, surround it with braces `{"}`.
// https://web.archive.org/web/20210422110817/https://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html
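// e.g. `title = "A {"}quoted{"} word"`: the braced quotes do not end the
// value (illustrative value, not taken from this file's tests).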
case 'quoted':
if (char === '"' && node.depth === 0) {
node = node.parent; // values
break;
} else if (char === '{') {
node.depth++;
} else if (char === '}') {
node.depth--;
if (node.depth < 0) {
throw new BibTeXSyntaxError(input, node, i, line, column);
}
}
node.value += char;
break;
}
}
return rootNode;
}
function | isWhitespace | identifier_name |
|
bibtexParser.ts | string,
public braces: number,
public parens: number
) {
parent.block = this;
}
}
export class EntryNode {
type = 'entry' as const;
key?: string;
keyEnded?: boolean;
fields: FieldNode[];
constructor(public parent: BlockNode, public wrapType: '{' | '(') {
parent.block = this;
this.fields = [];
}
}
export class FieldNode {
type = 'field' as const;
/** Each value is concatenated */
value: ConcatNode;
constructor(public parent: EntryNode, public name: string = '') {
this.value = new ConcatNode(this);
}
}
class ConcatNode {
type = 'concat' as const;
concat: (LiteralNode | BracedNode | QuotedNode)[];
canConsumeValue = true;
constructor(public parent: FieldNode) {
this.concat = [];
}
}
class LiteralNode {
type = 'literal' as const;
constructor(public parent: ConcatNode, public value: string) {
parent.concat.push(this);
}
}
class BracedNode {
type = 'braced' as const;
value = '';
/** Used to count opening and closing braces */
depth = 0;
constructor(public parent: ConcatNode) {
parent.concat.push(this);
}
}
class QuotedNode {
type = 'quoted' as const;
value = '';
/** Used to count opening and closing braces */
depth = 0;
constructor(public parent: ConcatNode) {
parent.concat.push(this);
}
}
type Node =
| RootNode
| TextNode
| BlockNode
| EntryNode
| CommentNode
| PreambleNode
| StringNode
| FieldNode
| ConcatNode
| LiteralNode
| BracedNode
| QuotedNode;
export function generateAST(input: string): RootNode {
const rootNode = new RootNode();
let node: Node = rootNode;
let line = 1;
let column = 0;
for (let i = 0; i < input.length; i++) {
const char = input[i]!;
const prev = input[i - 1]!;
if (char === '\n') {
line++;
column = 0;
}
column++;
switch (node.type) {
case 'root': {
node = char === '@' ? new BlockNode(node) : new TextNode(node, char);
break;
}
case 'text': {
// Whitespace or closing curly brace should precede an entry. This might
// not be correct but allows parsing of "valid" bibtex files in the
// wild.
if (char === '@' && /[\s\r\n}]/.test(prev)) {
node = new BlockNode(node.parent);
} else {
node.text += char;
}
break;
}
case 'block': {
if (char === '@') {
// everything prior to this was a comment
const prevNode =
node.parent.children[node.parent.children.length - 2];
if (prevNode?.type === 'text') {
prevNode.text += '@' + node.command;
} else {
// insert text node 1 from the end
node.parent.children.pop(); | node.parent.children.push(node);
}
node.command = '';
} else if (char === '{' || char === '(') {
const commandTrimmed = node.command.trim();
if (commandTrimmed === '' || /\s/.test(commandTrimmed)) {
// A block without a command is invalid. It's sometimes used in comments though, e.g. @(#)
// replace the block node
node.parent.children.pop();
node = new TextNode(node.parent, '@' + node.command + char);
} else {
node.command = commandTrimmed;
const command: string = node.command.toLowerCase();
const [braces, parens] = char === '{' ? [1, 0] : [0, 1];
const raw = '@' + command + char;
switch (command) {
case 'string':
node = new StringNode(node, raw, braces, parens);
break;
case 'preamble':
node = new PreambleNode(node, raw, braces, parens);
break;
case 'comment':
node = new CommentNode(node, raw, braces, parens);
break;
default:
node = new EntryNode(node, char);
break;
}
}
} else if (char.match(/[=#,})[\]]/)) {
// replace the block node
node.parent.children.pop();
node = new TextNode(node.parent, '@' + node.command + char);
} else {
node.command += char;
}
break;
}
case 'comment':
case 'string':
case 'preamble':
if (char === '{') {
node.braces++;
} else if (char === '}') {
node.braces--;
} else if (char === '(') {
node.parens++;
} else if (char === ')') {
node.parens--;
}
node.raw += char;
if (node.braces === 0 && node.parens === 0) {
node = node.parent.parent; // root
}
break;
case 'entry': {
if (isWhitespace(char)) {
if (!node.key) {
// Before key, ignore
} else {
// Ensure subsequent characters are not appended to the key
node.keyEnded = true;
}
} else if (char === ',') {
node = new FieldNode(node);
} else if (
(node.wrapType === '{' && char === '}') ||
(node.wrapType === '(' && char === ')')
) {
node = node.parent.parent; // root
} else if (char === '=' && node.key && isValidFieldName(node.key)) {
// Entry has no key, this is a field name
const field: FieldNode = new FieldNode(node, node.key);
node.fields.push(field);
node.key = undefined;
node = field.value;
} else if (node.keyEnded) {
throw new BibTeXSyntaxError(
input,
node,
i,
line,
column,
`The entry key cannot contain whitespace`
);
} else if (!isValidKeyCharacter(char)) {
throw new BibTeXSyntaxError(
input,
node,
i,
line,
column,
`The entry key cannot contain the character (${char})`
);
} else {
node.key = (node.key ?? '') + char;
}
break;
}
case 'field': {
if (char === '}' || char === ')') {
node.name = node.name.trim();
node = node.parent.parent.parent; // root
} else if (char === '=') {
node.name = node.name.trim();
node = node.value;
} else if (char === ',') {
node.name = node.name.trim();
node = new FieldNode(node.parent);
} else if (!isValidFieldName(char)) {
throw new BibTeXSyntaxError(input, node, i, line, column);
} else if (!node.name) {
if (!isWhitespace(char)) {
node.parent.fields.push(node);
node.name = char;
} else {
// noop
}
} else {
node.name += char;
}
break;
}
case 'concat': {
if (isWhitespace(char)) {
break; // noop
} else if (node.canConsumeValue) {
if (/[#=,}()[\]]/.test(char)) {
throw new BibTeXSyntaxError(input, node, i, line, column);
} else {
node.canConsumeValue = false;
if (char === '{') {
node = new BracedNode(node);
} else if (char === '"') {
node = new QuotedNode(node);
} else {
node = new LiteralNode(node, char);
}
}
} else {
if (char === ',') {
node = new FieldNode(node.parent.parent);
} else if (char === '}' || char === ')') {
node = node.parent.parent.parent.parent; // root
} else if (char === '#') {
node.canConsumeValue = true;
} else {
throw new BibTeXSyntaxError(input, node, i, line, column);
}
}
break;
}
case 'literal':
if (isWhitespace(char)) {
// end of literal
node = node.parent;
} else if (char === ',') {
node = new FieldNode(node.parent.parent.parent);
} else if (char === '}') {
node = node.parent.parent.parent.parent.parent; // root
} else if (char === '#') {
node = node.parent;
node.canConsumeValue = true;
} else {
node.value += char;
}
break;
// Values may be | new TextNode(node.parent, '@' + node.command); | random_line_split |
bibtexParser.ts | RootNode();
let node: Node = rootNode;
let line = 1;
let column = 0;
for (let i = 0; i < input.length; i++) {
const char = input[i]!;
const prev = input[i - 1]!;
if (char === '\n') {
line++;
column = 0;
}
column++;
switch (node.type) {
case 'root': {
node = char === '@' ? new BlockNode(node) : new TextNode(node, char);
break;
}
case 'text': {
// Whitespace or closing curly brace should precede an entry. This might
// not be correct but allows parsing of "valid" bibtex files in the
// wild.
if (char === '@' && /[\s\r\n}]/.test(prev)) {
node = new BlockNode(node.parent);
} else {
node.text += char;
}
break;
}
case 'block': {
if (char === '@') {
// everything prior to this was a comment
const prevNode =
node.parent.children[node.parent.children.length - 2];
if (prevNode?.type === 'text') {
prevNode.text += '@' + node.command;
} else {
// insert text node 1 from the end
node.parent.children.pop();
new TextNode(node.parent, '@' + node.command);
node.parent.children.push(node);
}
node.command = '';
} else if (char === '{' || char === '(') {
const commandTrimmed = node.command.trim();
if (commandTrimmed === '' || /\s/.test(commandTrimmed)) {
// A block without a command is invalid. It's sometimes used in comments though, e.g. @(#)
// replace the block node
node.parent.children.pop();
node = new TextNode(node.parent, '@' + node.command + char);
} else {
node.command = commandTrimmed;
const command: string = node.command.toLowerCase();
const [braces, parens] = char === '{' ? [1, 0] : [0, 1];
const raw = '@' + command + char;
switch (command) {
case 'string':
node = new StringNode(node, raw, braces, parens);
break;
case 'preamble':
node = new PreambleNode(node, raw, braces, parens);
break;
case 'comment':
node = new CommentNode(node, raw, braces, parens);
break;
default:
node = new EntryNode(node, char);
break;
}
}
} else if (char.match(/[=#,})[\]]/)) {
// replace the block node
node.parent.children.pop();
node = new TextNode(node.parent, '@' + node.command + char);
} else {
node.command += char;
}
break;
}
case 'comment':
case 'string':
case 'preamble':
if (char === '{') {
node.braces++;
} else if (char === '}') {
node.braces--;
} else if (char === '(') {
node.parens++;
} else if (char === ')') {
node.parens--;
}
node.raw += char;
if (node.braces === 0 && node.parens === 0) {
node = node.parent.parent; // root
}
break;
case 'entry': {
if (isWhitespace(char)) {
if (!node.key) {
// Before key, ignore
} else {
// Ensure subsequent characters are not appended to the key
node.keyEnded = true;
}
} else if (char === ',') {
node = new FieldNode(node);
} else if (
(node.wrapType === '{' && char === '}') ||
(node.wrapType === '(' && char === ')')
) {
node = node.parent.parent; // root
} else if (char === '=' && node.key && isValidFieldName(node.key)) {
// Entry has no key, this is a field name
const field: FieldNode = new FieldNode(node, node.key);
node.fields.push(field);
node.key = undefined;
node = field.value;
} else if (node.keyEnded) {
throw new BibTeXSyntaxError(
input,
node,
i,
line,
column,
`The entry key cannot contain whitespace`
);
} else if (!isValidKeyCharacter(char)) {
throw new BibTeXSyntaxError(
input,
node,
i,
line,
column,
`The entry key cannot contain the character (${char})`
);
} else {
node.key = (node.key ?? '') + char;
}
break;
}
case 'field': {
if (char === '}' || char === ')') {
node.name = node.name.trim();
node = node.parent.parent.parent; // root
} else if (char === '=') {
node.name = node.name.trim();
node = node.value;
} else if (char === ',') {
node.name = node.name.trim();
node = new FieldNode(node.parent);
} else if (!isValidFieldName(char)) {
throw new BibTeXSyntaxError(input, node, i, line, column);
} else if (!node.name) {
if (!isWhitespace(char)) {
node.parent.fields.push(node);
node.name = char;
} else {
// noop
}
} else {
node.name += char;
}
break;
}
case 'concat': {
if (isWhitespace(char)) {
break; // noop
} else if (node.canConsumeValue) {
if (/[#=,}()[\]]/.test(char)) {
throw new BibTeXSyntaxError(input, node, i, line, column);
} else {
node.canConsumeValue = false;
if (char === '{') {
node = new BracedNode(node);
} else if (char === '"') {
node = new QuotedNode(node);
} else {
node = new LiteralNode(node, char);
}
}
} else {
if (char === ',') {
node = new FieldNode(node.parent.parent);
} else if (char === '}' || char === ')') {
node = node.parent.parent.parent.parent; // root
} else if (char === '#') {
node.canConsumeValue = true;
} else {
throw new BibTeXSyntaxError(input, node, i, line, column);
}
}
break;
}
case 'literal':
if (isWhitespace(char)) {
// end of literal
node = node.parent;
} else if (char === ',') {
node = new FieldNode(node.parent.parent.parent);
} else if (char === '}') {
node = node.parent.parent.parent.parent.parent; // root
} else if (char === '#') {
node = node.parent;
node.canConsumeValue = true;
} else {
node.value += char;
}
break;
// Values may be enclosed in curly braces. Curly braces may be used within
// the value but they must be balanced.
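// e.g. `author = {Maria {de la} Cruz}`: inner braces must stay balanced
// (illustrative value, not from this file's tests).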
case 'braced':
if (char === '}' && node.depth === 0) {
node = node.parent; // values
break;
} else if (char === '{') {
node.depth++;
} else if (char === '}') {
node.depth--;
}
node.value += char;
break;
// Values may be enclosed in double quotes. Curly braces may be used
// within quoted values but they must be balanced.
//
// To escape a double quote, surround it with braces `{"}`.
// https://web.archive.org/web/20210422110817/https://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html
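// e.g. `title = "A {"}quoted{"} word"`: {"} escapes the inner quotes
// (illustrative value, not from this file's tests).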
case 'quoted':
if (char === '"' && node.depth === 0) {
node = node.parent; // values
break;
} else if (char === '{') {
node.depth++;
} else if (char === '}') {
node.depth--;
if (node.depth < 0) {
throw new BibTeXSyntaxError(input, node, i, line, column);
}
}
node.value += char;
break;
}
}
return rootNode;
}
function isWhitespace(string: string): boolean {
return /^[ \t\n\r]*$/.test(string);
}
/**
* Certain characters are special in latex: {}%#$~. These cannot be used in
* \cite without error. See https://tex.stackexchange.com/a/408548
*/
function isValidKeyCharacter(char: string): boolean | {
return !/[#%{}~$,]/.test(char);
} | identifier_body |