repo
string | commit
string | message
string | diff
string |
---|---|---|---|
arthur-debert/google-code-issues-migrator
|
17f626004dd14ae377020021e4a797a30d105289
|
Bump lxml from 3.2.3 to 4.9.1
|
diff --git a/requirements.txt b/requirements.txt
index 9a504d7..01d3b0c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
PyGithub==1.17.0
cssselect==0.8
-lxml==3.2.3
+lxml==4.9.1
pyquery==1.2.4
wsgiref==0.1.2
|
arthur-debert/google-code-issues-migrator
|
7d453a3d08e6cd5a556a9e43bf87e0a3f61e94e5
|
Implement a --only switch, to migrate only the selected issues
|
diff --git a/migrateissues.py b/migrateissues.py
index a02baca..961efa7 100755
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,506 +1,513 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def stars_to_label(stars):
'''Return a label string corresponding to a star range.
For example, '1' -> '1 star', '2' -> '2-5 stars', etc.
'''
stars = int(stars)
if stars == 1:
return '1 star'
elif stars <= 5:
return '2–5 stars'
elif stars <= 10:
return '6–10 stars'
elif stars <= 20:
return '11–20 stars'
else:
return '21+ stars'
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def spacing_template(wordList, spacing=12):
output = []
template = '\t{0:%d}' % (spacing)
for word in wordList:
output.append(template.format(word))
return ' : '.join(output)
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def transform_to_markdown_compliant(string):
# Escape chars interpreted as markdown formatting by GH
string = re.sub(r'(\s)~~', r'\1\\~~', string)
string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
# '==' is also making headers, but can't nicely escape ('\' shows up)
string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
# Escape < to avoid being treated as an html tag
string = re.sub(r'(\s)<', r'\1\\<', string)
# Avoid links that should not be links.
# I can find no way to escape the # w/o using backtics:
string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
# Create issue links
string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
return string
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def get_github_milestone(name):
""" Returns the Github milestone with the given name, creating it if necessary. """
try:
return milestone_cache[name]
except KeyError:
for milestone in list(github_repo.get_milestones()):
if milestone.title == name:
return milestone_cache.setdefault(name, milestone)
return milestone_cache.setdefault(name, github_repo.create_milestone(name))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if options.verbose:
output('\n')
outList = [
spacing_template(['Title', issue['title']]),
spacing_template(['State', issue['state']]),
spacing_template(['Labels', issue['labels']]),
spacing_template(['Milestone', issue['milestone']]),
spacing_template(['Source link', issue['link']])
]
output('\n'.join(outList))
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
text = body.encode('utf-8')
text = transform_to_markdown_compliant(text)
if issue['milestone']:
github_milestone = get_github_milestone(issue['milestone'])
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels,
milestone = github_milestone)
else:
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
if options.verbose and github_issue:
output('\n')
output(spacing_template(['Dest link', github_issue.url]))
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output("\nSyncing comments ")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
topost = transform_to_markdown_compliant(body)
if topost in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if options.verbose:
output('\n\tAdd: From {author} on {date}'.format(**comment))
if not options.dry_run:
topost = topost.encode('utf-8')
github_issue.create_comment(topost)
# We use a delay to avoid comments being created on GitHub
# in the wrong order, due to network non-determinism.
# Without this delay, I consistently observed a full 1 in 3
# GoogleCode issue comments being reordered.
# XXX: querying GitHub in a loop to see when the comment has
# been posted may be faster, but will cut into the rate limit.
time.sleep(5)
def get_attachments(link, attachments):
if not attachments:
return ''
body = u'\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
# Also, build a milestone (google code sees it as a label)
milestone = None
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Milestone-'):
milestone = label.replace('Milestone-', '')
continue
if label.startswith('Priority-') and options.omit_priority:
continue
if not label:
continue
labels.append(LABEL_MAPPING.get(label, label))
if options.migrate_stars and 'Stars' in issue_summary:
labels.append(stars_to_label(issue_summary['Stars']))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
issue['milestone'] = milestone
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
# Missing issues may still exist in csv; make sure link is good
try:
connection = opener.open(issue['link'])
except urllib2.HTTPError:
return None
encoding = connection.headers['content-type'].split('charset=')[-1]
# Pass "ignore" so malformed page data doesn't abort us
doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
try:
body = comment('pre').text()
except:
body = "(There was an error importing this comment body. See original issue on Google Code.)"
logging.error("Error importing comment body")
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
-def get_gcode_issues():
+def get_gcode_issues(onlyissues=None):
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
- issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
+ if onlyissues:
+ issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel) if row['ID'] in onlyissues)
+ else:
+ issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
-def process_gcode_issues(existing_issues):
+def process_gcode_issues(existing_issues, onlyissues=None):
""" Migrates all Google Code issues in the given dictionary to Github. """
- issues = get_gcode_issues()
+ issues = get_gcode_issues(onlyissues)
previous_gid = 1
if options.start_at is not None:
issues = [x for x in issues if int(x['ID']) >= options.start_at]
previous_gid = options.start_at - 1
output('Starting at issue %d\n' % options.start_at)
for issue in issues:
issue = get_gcode_issue(issue)
# problem occured getting issue information from url, may be deleted
if issue is None:
continue
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)
parser.add_option("-v", '--verbose', action = 'store_true', dest = 'verbose', help = 'Print more detailed information during migration', default = False)
+ parser.add_option("-o", "--only", dest="only", help="Migrate only the specified issues. Pass a single parameter containing a list of issues IDs, like \"1 3 98 110\"", default=None)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
+ if options.only:
+ options.only = options.only.split()
+
if options.verbose:
logging.basicConfig(level = logging.INFO)
else:
logging.basicConfig(level = logging.ERROR)
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
milestone_cache = {}
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except Exception:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
- process_gcode_issues(existing_issues)
+ process_gcode_issues(existing_issues, options.only)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
19638de7b94ce01a53616eab8c88d56e50599969
|
Catch bad credentials errors properly.
|
diff --git a/migrateissues.py b/migrateissues.py
index 9aa33c7..a02baca 100755
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,506 +1,506 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def stars_to_label(stars):
'''Return a label string corresponding to a star range.
For example, '1' -> '1 star', '2' -> '2-5 stars', etc.
'''
stars = int(stars)
if stars == 1:
return '1 star'
elif stars <= 5:
return '2–5 stars'
elif stars <= 10:
return '6–10 stars'
elif stars <= 20:
return '11–20 stars'
else:
return '21+ stars'
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def spacing_template(wordList, spacing=12):
output = []
template = '\t{0:%d}' % (spacing)
for word in wordList:
output.append(template.format(word))
return ' : '.join(output)
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def transform_to_markdown_compliant(string):
# Escape chars interpreted as markdown formatting by GH
string = re.sub(r'(\s)~~', r'\1\\~~', string)
string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
# '==' is also making headers, but can't nicely escape ('\' shows up)
string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
# Escape < to avoid being treated as an html tag
string = re.sub(r'(\s)<', r'\1\\<', string)
# Avoid links that should not be links.
# I can find no way to escape the # w/o using backtics:
string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
# Create issue links
string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
return string
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def get_github_milestone(name):
""" Returns the Github milestone with the given name, creating it if necessary. """
try:
return milestone_cache[name]
except KeyError:
for milestone in list(github_repo.get_milestones()):
if milestone.title == name:
return milestone_cache.setdefault(name, milestone)
return milestone_cache.setdefault(name, github_repo.create_milestone(name))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if options.verbose:
output('\n')
outList = [
spacing_template(['Title', issue['title']]),
spacing_template(['State', issue['state']]),
spacing_template(['Labels', issue['labels']]),
spacing_template(['Milestone', issue['milestone']]),
spacing_template(['Source link', issue['link']])
]
output('\n'.join(outList))
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
text = body.encode('utf-8')
text = transform_to_markdown_compliant(text)
if issue['milestone']:
github_milestone = get_github_milestone(issue['milestone'])
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels,
milestone = github_milestone)
else:
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
if options.verbose and github_issue:
output('\n')
output(spacing_template(['Dest link', github_issue.url]))
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output("\nSyncing comments ")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
topost = transform_to_markdown_compliant(body)
if topost in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if options.verbose:
output('\n\tAdd: From {author} on {date}'.format(**comment))
if not options.dry_run:
topost = topost.encode('utf-8')
github_issue.create_comment(topost)
# We use a delay to avoid comments being created on GitHub
# in the wrong order, due to network non-determinism.
# Without this delay, I consistently observed a full 1 in 3
# GoogleCode issue comments being reordered.
# XXX: querying GitHub in a loop to see when the comment has
# been posted may be faster, but will cut into the rate limit.
time.sleep(5)
def get_attachments(link, attachments):
if not attachments:
return ''
body = u'\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
# Also, build a milestone (google code sees it as a label)
milestone = None
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Milestone-'):
milestone = label.replace('Milestone-', '')
continue
if label.startswith('Priority-') and options.omit_priority:
continue
if not label:
continue
labels.append(LABEL_MAPPING.get(label, label))
if options.migrate_stars and 'Stars' in issue_summary:
labels.append(stars_to_label(issue_summary['Stars']))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
issue['milestone'] = milestone
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
# Missing issues may still exist in csv; make sure link is good
try:
connection = opener.open(issue['link'])
except urllib2.HTTPError:
return None
encoding = connection.headers['content-type'].split('charset=')[-1]
# Pass "ignore" so malformed page data doesn't abort us
doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
try:
body = comment('pre').text()
except:
body = "(There was an error importing this comment body. See original issue on Google Code.)"
logging.error("Error importing comment body")
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
if options.start_at is not None:
issues = [x for x in issues if int(x['ID']) >= options.start_at]
previous_gid = options.start_at - 1
output('Starting at issue %d\n' % options.start_at)
for issue in issues:
issue = get_gcode_issue(issue)
# problem occured getting issue information from url, may be deleted
if issue is None:
continue
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    Issues whose body lacks the "Original issue" footer are ignored;
    imported issues missing the 'imported' label only produce a warning.
    """
    output("Retrieving existing Github issues...\n")
    id_pattern = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        open_issues = list(github_repo.get_issues(state='open'))
        closed_issues = list(github_repo.get_issues(state='closed'))
        all_issues = open_issues + closed_issues

        issue_map = {}
        for gh_issue in all_issues:
            match = id_pattern.search(gh_issue.body)
            if match is None:
                continue
            google_id = int(match.group(1))
            issue_map[google_id] = gh_issue
            label_names = [label.name for label in gh_issue.get_labels()]
            if 'imported' not in label_names:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', google_id, label_names, gh_issue.title)

        logging.info('Found %d Github issues, %d imported', len(all_issues), len(issue_map))
    except:
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """ Log the current Github API rate limit as (remaining, total). """
    remaining_total = github.rate_limiting
    logging.info('Rate limit (remaining/total) %r', remaining_total)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)
parser.add_option("-v", '--verbose', action = 'store_true', dest = 'verbose', help = 'Print more detailed information during migration', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
if options.verbose:
logging.basicConfig(level = logging.INFO)
else:
logging.basicConfig(level = logging.ERROR)
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
milestone_cache = {}
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
- except BadCredentialsException:
+ except Exception:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
a3f711b1a513f0ee7fa20835d977e2302dc7b29f
|
Add tip for fixing pip requirements installation.
|
diff --git a/README.md b/README.md
index 91ad727..df7c207 100644
--- a/README.md
+++ b/README.md
@@ -1,99 +1,101 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
Github's API does not support creating issues or adding comments without
notifying everyone watching the repository. As a result, running this script
targeting an existing repository with watchers who do not want to receive a
very large number of emails is probably not a good idea.
I do not know of any way around this other than deleting and recreating the
repository immediately before running the import.
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
+If you have problems, try `sudo pip install -r requirements.txt --upgrade`.
+
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
For Organizations, use orgname/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
--skip-closed Skip all closed bugs
--start-at Start at the given Google Code issue number
--migrate-stars Migrate binned star counts as labels
--verbose Print more verbose information for each add.
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repository.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
`--skip-closed` will skip migrating issues that were closed.
`--start-at` will skip migrating issues with Google Code issue numbers less than
the provided value.
`--migrate-stars` will migrate the 'Stars' count on each Google Code issue to
Github labels. The following mapping is used:
* `Stars == 1`: Label '1 star'
* `Stars <= 5`: Label '2-5 stars'
* `Stars <= 10`: Label '6-10 stars'
* `Stars <= 20`: Label '11-20 stars'
* `Stars >= 21`: Label '21+ stars'
|
arthur-debert/google-code-issues-migrator
|
2c00975fc8981619eded08c915e31304191ef059
|
Make --verbose more verbose and clean up comments.
|
diff --git a/README.md b/README.md
index 2032035..91ad727 100644
--- a/README.md
+++ b/README.md
@@ -1,99 +1,99 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
Github's API does not support creating issues or adding comments without
notifying everyone watching the repository. As a result, running this script
targeting an existing repository with watchers who do not want to receive a
very large number of emails is probably not a good idea.
I do not know of any way around this other than deleting and recreating the
repository immediately before running the import.
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
For Organizations, use orgname/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
--skip-closed Skip all closed bugs
--start-at Start at the given Google Code issue number
--migrate-stars Migrate binned star counts as labels
--verbose Print more verbose information for each add.
-
+
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repository.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
`--skip-closed` will skip migrating issues that were closed.
-`--start-at` will skip migrating issues with Google Code issue numbers less than
+`--start-at` will skip migrating issues with Google Code issue numbers less than
the provided value.
`--migrate-stars` will migrate the 'Stars' count on each Google Code issue to
Github labels. The following mapping is used:
* `Stars == 1`: Label '1 star'
-* `Stars <= 5`: Label '2â5 stars'
-* `Stars <= 10`: Label '6â10 stars'
-* `Stars <= 20`: Label '11â20 stars'
+* `Stars <= 5`: Label '2-5 stars'
+* `Stars <= 10`: Label '6-10 stars'
+* `Stars <= 20`: Label '11-20 stars'
* `Stars >= 21`: Label '21+ stars'
diff --git a/migrateissues.py b/migrateissues.py
index 4613c8d..9aa33c7 100755
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,507 +1,506 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
-logging.basicConfig(level = logging.ERROR)
-
# The maximum number of records to retrieve from Google Code in a single request
-
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
-
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
-
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
-
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def stars_to_label(stars):
'''Return a label string corresponding to a star range.
For example, '1' -> '1 star', '2' -> '2-5 stars', etc.
'''
stars = int(stars)
if stars == 1:
return '1 star'
elif stars <= 5:
return '2â5 stars'
elif stars <= 10:
return '6â10 stars'
elif stars <= 20:
return '11â20 stars'
else:
return '21+ stars'
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def spacing_template(wordList, spacing=12):
output = []
template = '\t{0:%d}' % (spacing)
for word in wordList:
output.append(template.format(word))
return ' : '.join(output)
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def transform_to_markdown_compliant(string):
# Escape chars interpreted as markdown formatting by GH
string = re.sub(r'(\s)~~', r'\1\\~~', string)
string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
# '==' is also making headers, but can't nicely escape ('\' shows up)
string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
# Escape < to avoid being treated as an html tag
string = re.sub(r'(\s)<', r'\1\\<', string)
# Avoid links that should not be links.
# I can find no way to escape the # w/o using backtics:
string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
# Create issue links
string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
return string
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def get_github_milestone(name):
""" Returns the Github milestone with the given name, creating it if necessary. """
try:
return milestone_cache[name]
except KeyError:
for milestone in list(github_repo.get_milestones()):
if milestone.title == name:
return milestone_cache.setdefault(name, milestone)
return milestone_cache.setdefault(name, github_repo.create_milestone(name))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if options.verbose:
output('\n')
outList = [
spacing_template(['Title', issue['title']]),
spacing_template(['State', issue['state']]),
spacing_template(['Labels', issue['labels']]),
spacing_template(['Milestone', issue['milestone']]),
spacing_template(['Source link', issue['link']])
]
output('\n'.join(outList))
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
text = body.encode('utf-8')
text = transform_to_markdown_compliant(text)
if issue['milestone']:
github_milestone = get_github_milestone(issue['milestone'])
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels,
milestone = github_milestone)
else:
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
if options.verbose and github_issue:
output('\n')
output(spacing_template(['Dest link', github_issue.url]))
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output("\nSyncing comments ")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
topost = transform_to_markdown_compliant(body)
if topost in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if options.verbose:
output('\n\tAdd: From {author} on {date}'.format(**comment))
if not options.dry_run:
topost = topost.encode('utf-8')
github_issue.create_comment(topost)
# We use a delay to avoid comments being created on GitHub
# in the wrong order, due to network non-determinism.
# Without this delay, I consistently observed a full 1 in 3
# GoogleCode issue comments being reordered.
# XXX: querying GitHub in a loop to see when the comment has
# been posted may be faster, but will cut into the rate limit.
time.sleep(5)
def get_attachments(link, attachments):
if not attachments:
return ''
body = u'\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
# Also, build a milestone (google code sees it as a label)
milestone = None
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Milestone-'):
milestone = label.replace('Milestone-', '')
continue
if label.startswith('Priority-') and options.omit_priority:
continue
if not label:
continue
labels.append(LABEL_MAPPING.get(label, label))
if options.migrate_stars and 'Stars' in issue_summary:
labels.append(stars_to_label(issue_summary['Stars']))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
issue['milestone'] = milestone
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
# Missing issues may still exist in csv; make sure link is good
try:
connection = opener.open(issue['link'])
except urllib2.HTTPError:
return None
encoding = connection.headers['content-type'].split('charset=')[-1]
# Pass "ignore" so malformed page data doesn't abort us
doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
try:
body = comment('pre').text()
except:
body = "(There was an error importing this comment body. See original issue on Google Code.)"
logging.error("Error importing comment body")
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
if options.start_at is not None:
issues = [x for x in issues if int(x['ID']) >= options.start_at]
previous_gid = options.start_at - 1
output('Starting at issue %d\n' % options.start_at)
for issue in issues:
issue = get_gcode_issue(issue)
# problem occured getting issue information from url, may be deleted
if issue is None:
continue
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)
- parser.add_option('--verbose', action = 'store_true', dest = 'verbose', help = 'Print more detailed information for each add.', default = False)
+ parser.add_option("-v", '--verbose', action = 'store_true', dest = 'verbose', help = 'Print more detailed information during migration', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
+ if options.verbose:
+ logging.basicConfig(level = logging.INFO)
+ else:
+ logging.basicConfig(level = logging.ERROR)
+
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
milestone_cache = {}
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
60cbfa40e0ae083840f083b105e71edc32967c5b
|
Revise messages shown when exceptions occur reading GC comment bodies.
|
diff --git a/migrateissues.py b/migrateissues.py
index 60f07bf..30a68ad 100755
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,453 +1,453 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def stars_to_label(stars):
    '''Return a label string corresponding to a star range.

    For example, '1' -> '1 star', '2' -> '2-5 stars', etc.  Accepts the
    raw string count from the Google Code CSV (or an int).
    '''
    stars = int(stars)
    # Fixed mojibake: the range labels previously contained a garbled
    # en-dash ('2â5 stars'); they now use a proper U+2013 dash.
    if stars == 1:
        return '1 star'
    elif stars <= 5:
        return '2–5 stars'
    elif stars <= 10:
        return '6–10 stars'
    elif stars <= 20:
        return '11–20 stars'
    else:
        return '21+ stars'
def output(string):
    """Write *string* to stdout and flush immediately.

    Used for incremental progress reporting, so the flush matters when
    stdout is piped or block-buffered.
    """
    stream = sys.stdout
    stream.write(string)
    stream.flush()
def escape(s):
    """Process text to convert markup and escape things which need escaping.

    Returns *s* unchanged when it is empty or None.
    """
    if s:
        # Escape literal % signs as the HTML entity &#37;.  The previous
        # code read s.replace('%', '%'), a no-op -- almost certainly an
        # HTML-entity that was decoded somewhere along the way.
        s = s.replace('%', '&#37;')
    return s
def transform_to_markdown_compliant(string):
    """Rewrite Google Code issue text so Github markdown renders it faithfully.

    Escapes characters Github would treat as formatting, avoids accidental
    issue links, and turns Google Code issue references into real ones.
    """
    # (pattern, replacement) pairs applied strictly in order -- later
    # rules assume earlier escaping has already been performed.
    rules = (
        # Escape chars interpreted as markdown formatting by GH
        (r'(\s)~~', r'\1\\~~'),
        (r'\n(\s*)>', r'\n\1\\>'),
        (r'\n(\s*)#', r'\n\1\\#'),
        (r'(?m)^-([- \r]*)$', r'\\-\1'),
        # '==' also makes headers, but can't nicely escape ('\' shows up);
        # insert a blank line before the '==' run instead.
        (r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2'),
        # Escape < to avoid being treated as an html tag
        (r'(\s)<', r'\1\\<'),
        # Avoid links that should not be links; there is no way to escape
        # the '#' without using backticks.
        (r'(\s+)(#\d+)(\W)', r'\1`\2`\3'),
        # Create issue links
        (r'\bi#(\d+)', r'issue #\1'),
        (r'\bissue (\d+)', r'issue #\1'),
    )
    for pattern, replacement in rules:
        string = re.sub(pattern, replacement, string)
    return string
def github_label(name, color = "FFFFFF"):
    """ Returns the Github label with the given name, creating it if necessary.

    Results are memoised in the module-level label_cache so repeated
    lookups don't cost extra API requests.
    """
    if name in label_cache:
        return label_cache[name]
    # Cache miss: fetch the label from the repository; a GithubException
    # here means the label does not exist yet, so create it.
    try:
        label = github_repo.get_label(name)
    except GithubException:
        label = github_repo.create_label(name, color)
    return label_cache.setdefault(name, label)
def parse_gcode_date(date_text):
    """ Transforms a Google Code date into a more human readable string.

    Google Code emits dates like 'Mon Jan 01 12:30:45 2001'; any text
    that does not match that shape is returned unchanged.
    """
    try:
        when = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
    except ValueError:
        return date_text
    else:
        return when.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
    """ Migrates the given Google Code issue to Github.

    Returns the newly created Github issue object, or None on a dry run.
    Raises if the remaining Github API quota is too low to safely finish.
    """
    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        # Fixed message typo ("Aborting to to" -> "Aborting due to").
        raise Exception('Aborting due to impending Github API rate-limit cutoff.')

    # NOTE(review): this replace is a no-op as written; presumably it was
    # meant to escape '%' (e.g. as '&#37;', cf. the escape() helper) and
    # got mangled somewhere -- confirm against the project history.
    body = issue['content'].replace('%', '%')

    output('Adding issue %d' % issue['gid'])
    github_issue = None

    if not options.dry_run:
        github_labels = [github_label(label) for label in issue['labels']]
        text = body.encode('utf-8')
        text = transform_to_markdown_compliant(text)
        github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)

    # Assigns issues that originally had an owner to the current user
    if issue['owner'] and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)

    return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
    """ Migrates all comments from a Google Code issue to its Github copy.

    Comments already present on the Github issue (matched by exact body
    text after markdown transformation) are skipped, which makes this
    safe to re-run on a partially migrated issue.
    """
    # Retrieve existing Github comments, to figure out which Google Code comments are new
    existing_comments = [comment.body for comment in github_issue.get_comments()]

    # Add any remaining comments to the Github issue
    output(", adding comments")
    for i, comment in enumerate(gcode_issue['comments']):
        body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
        topost = transform_to_markdown_compliant(body)
        if topost in existing_comments:
            logging.info('Skipping comment %d: already present', i + 1)
        else:
            logging.info('Adding comment %d', i + 1)
            if not options.dry_run:
                topost = topost.encode('utf-8')
                github_issue.create_comment(topost)
                # We use a delay to avoid comments being created on GitHub
                # in the wrong order, due to network non-determinism.
                # Without this delay, I consistently observed a full 1 in 3
                # GoogleCode issue comments being reordered.
                # XXX: querying GitHub in a loop to see when the comment has
                # been posted may be faster, but will cut into the rate limit.
                time.sleep(5)
        output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = u'\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
    """ Builds a full issue dict from one row of the Google Code summary CSV.

    Fills in summary-level fields (id, title, state, labels, ...) from the
    CSV row, then scrapes the issue's detail page for the description body
    and all comments.  Comments longer than Github's limit are split into
    multiple entries.
    """
    def get_author(doc):
        # Render the author as a markdown link back to their GC profile.
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))

    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        # NOTE(review): this replace is a no-op as written; it presumably
        # was meant to escape '%' (cf. escape()) -- confirm.
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        if not label:
            continue
        labels.append(LABEL_MAPPING.get(label, label))
    if options.migrate_stars and 'Stars' in issue_summary:
        labels.append(stars_to_label(issue_summary['Stars']))

    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])

    issue['labels'] = labels

    # Scrape the issue details page for the issue body and comments
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        # Authenticated requests get unmangled author names.
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    connection = opener.open(issue['link'])
    encoding = connection.headers['content-type'].split('charset=')[-1]
    # Pass "ignore" so malformed page data doesn't abort us
    doc = pq(connection.read().decode(encoding, "ignore"))

    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)

    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                # '...' markers tie the split pieces together for readers.
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())

    # The first "comment" is actually the issue description itself.
    split_comment(issue, description('pre').text())
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
            content = issue['comments'].pop(0)['body'],
            footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
            attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
            **issue)

    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments
        date = parse_gcode_date(comment('.date').attr('title'))
        try:
            body = comment('pre').text()
        except:
            # Malformed comment markup: keep migrating, point readers back
            # to the original issue for this comment's content.
            body = "(There was an error importing this comment body. See original issue on Google Code.)"
            logging.error("Error importing comment body")
        author = get_author(comment)
        updates = comment('.updates .box-inner')
        if updates:
            # Status/label updates: crude HTML -> markdown conversion.
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        split_comment({'date': date, 'author': author}, body)

    return issue
def get_gcode_issues():
    """Download the complete issue summary CSV from Google Code.

    The CSV endpoint pages its results and appends a 'truncated' marker
    row when more remain, so keep fetching until that marker disappears.
    """
    page_size = 100
    offset = 0
    rows = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, page_size, offset)
        rows.extend(csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
        if rows and 'truncated' in rows[-1]['ID']:
            # Drop the marker row and fetch the next page.
            rows.pop()
            offset += page_size
        else:
            return rows
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue ids to already-migrated Github
    issues; those are not re-created, only topped up with new comments.
    """
    issues = get_gcode_issues()
    previous_gid = 1

    if options.start_at is not None:
        issues = [x for x in issues if int(x['ID']) >= options.start_at]
        previous_gid = options.start_at - 1
        output('Starting at issue %d\n' % options.start_at)

    for issue in issues:
        issue = get_gcode_issue(issue)
        if options.skip_closed and (issue['state'] == 'closed'):
            continue

        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # NOTE(review): indexing by previous_gid looks suspicious --
                # gid seems to be the intended key here; confirm.
                existing_issues[previous_gid] = github_issue
        previous_gid = issue['gid']

        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)

        if github_issue:
            add_comments_to_issue(github_issue, issue)
            # Close the Github copy if the Google Code issue was closed.
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    Migrated issues are recognised by the 'Original issue' footer URL that
    the migration appends to each body.
    """
    output("Retrieving existing Github issues...\n")
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            id_match = id_re.search(issue.body)
            if not id_match:
                continue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if 'imported' not in labels:
                # TODO we could fix up the label here instead of just warning
                # (logging.warn is deprecated; use logging.warning)
                logging.warning('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported', existing_count, imported_count)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate untouched; the failure is logged and re-raised.
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """Log the remaining/total Github API rate-limit budget at INFO level."""
    logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
    # Command-line entry point: parse options, authenticate against
    # Github, resolve the target repository, then run the migration.
    usage = "usage: %prog [options] <google project name> <github username> <github project>"
    description = "Migrate all issues from a Google Code project to a Github project."
    parser = optparse.OptionParser(usage = usage, description = description)

    parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
    parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
    parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
    parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
    parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
    parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
    parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
    parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)

    options, args = parser.parse_args()

    if len(args) != 3:
        parser.print_help()
        sys.exit()

    label_cache = {} # Cache Github tags, to avoid unnecessary API requests

    google_project_name, github_user_name, github_project = args

    # Verify the credentials before doing any work.
    # NOTE(review): BadCredentialsException is never imported in this file
    # (only Github and GithubException are) -- this except clause would
    # raise NameError if it ever triggered; confirm and import it.
    while True:
        github_password = getpass.getpass("Github password: ")
        try:
            Github(github_user_name, github_password).get_user().login
            break
        except BadCredentialsException:
            print "Bad credentials, try again."

    github = Github(github_user_name, github_password)
    log_rate_info()
    github_user = github.get_user()

    # If the project name is specified as owner/project, assume that it's owned by either
    # a different user than the one we have credentials for, or an organization.
    if "/" in github_project:
        owner_name, github_project = github_project.split("/")
        try:
            github_owner = github.get_user(owner_name)
        except GithubException:
            try:
                github_owner = github.get_organization(owner_name)
            except GithubException:
                # Fall back to the authenticated user's own namespace.
                github_owner = github_user
    else:
        github_owner = github_user

    github_repo = github_owner.get_repo(github_project)

    try:
        existing_issues = get_existing_github_issues()
        log_rate_info()
        process_gcode_issues(existing_issues)
    except Exception:
        parser.print_help()
        raise
|
arthur-debert/google-code-issues-migrator
|
0999cb44902a8d1d9d3565fe69a3499d2759baaa
|
Added --verbose option to print more information of the issue and comments being added. Also added support for creating github milestones (instead of creating a label)
|
diff --git a/README.md b/README.md
index 06853a8..2032035 100644
--- a/README.md
+++ b/README.md
@@ -1,98 +1,99 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
Github's API does not support creating issues or adding comments without
notifying everyone watching the repository. As a result, running this script
targetting an existing repository with watchers who do not want to recieve a
very large number of emails is probably not a good idea.
I do not know of any way around this other than deleting and recreating the
repository immediately before running the import.
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
For Organizations, use orgname/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
--skip-closed Skip all closed bugs
--start-at Start at the given Google Code issue number
--migrate-stars Migrate binned star counts as labels
+ --verbose Print more verbose information for each add.
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repistory.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
`--skip-closed` will skip migrating issues that were closed.
`--start-at` will skip migrating issues with Google Code issue numbers less than
the provided value.
`--migrate-stars` will migrate the 'Stars' count on each Google Code issue to
Github labels. The following mapping is used:
* `Stars == 1`: Label '1 star'
* `Stars <= 5`: Label '2â5 stars'
* `Stars <= 10`: Label '6â10 stars'
* `Stars <= 20`: Label '11â20 stars'
* `Stars >= 21`: Label '21+ stars'
diff --git a/migrateissues.py b/migrateissues.py
index c71f999..e3d36ea 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,449 +1,503 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def stars_to_label(stars):
'''Return a label string corresponding to a star range.
For example, '1' -> '1 star', '2' -> '2-5 stars', etc.
'''
stars = int(stars)
if stars == 1:
return '1 star'
elif stars <= 5:
return '2â5 stars'
elif stars <= 10:
return '6â10 stars'
elif stars <= 20:
return '11â20 stars'
else:
return '21+ stars'
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
+def spacing_template(wordList, spacing=12):
+ output = []
+ template = '\t{0:%d}' % (spacing)
+ for word in wordList:
+ output.append(template.format(word))
+ return ' : '.join(output)
+
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def transform_to_markdown_compliant(string):
# Escape chars interpreted as markdown formatting by GH
string = re.sub(r'(\s)~~', r'\1\\~~', string)
string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
# '==' is also making headers, but can't nicely escape ('\' shows up)
string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
# Escape < to avoid being treated as an html tag
string = re.sub(r'(\s)<', r'\1\\<', string)
# Avoid links that should not be links.
# I can find no way to escape the # w/o using backtics:
string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
# Create issue links
string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
return string
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
+def get_github_milestone(name):
+ """ Returns the Github milestone with the given name, creating it if necessary. """
+
+ try:
+ return milestone_cache[name]
+ except KeyError:
+ for milestone in list(github_repo.get_milestones()):
+ if milestone.title == name:
+ return milestone_cache.setdefault(name, milestone)
+ return milestone_cache.setdefault(name, github_repo.create_milestone(name))
+
+
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
+ if options.verbose:
+ output('\n')
+ outList = [
+ spacing_template(['Title', issue['title']]),
+ spacing_template(['State', issue['state']]),
+ spacing_template(['Labels', issue['labels']]),
+ spacing_template(['Milestone', issue['milestone']]),
+ spacing_template(['Source link', issue['link']])
+ ]
+ output('\n'.join(outList))
+
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
text = body.encode('utf-8')
text = transform_to_markdown_compliant(text)
- github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
+ if issue['milestone']:
+ github_milestone = get_github_milestone(issue['milestone'])
+ github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels,
+ milestone = github_milestone)
+ else:
+ github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
+ if options.verbose and github_issue:
+ output('\n')
+ output(spacing_template(['Dest link', github_issue.url]))
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
- output(", adding comments")
+ output("\nSyncing comments ")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
topost = transform_to_markdown_compliant(body)
if topost in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
+ if options.verbose:
+ output('\n\tAdd: From {author} on {date}'.format(**comment))
if not options.dry_run:
topost = topost.encode('utf-8')
github_issue.create_comment(topost)
# We use a delay to avoid comments being created on GitHub
# in the wrong order, due to network non-determinism.
# Without this delay, I consistently observed a full 1 in 3
# GoogleCode issue comments being reordered.
# XXX: querying GitHub in a loop to see when the comment has
# been posted may be faster, but will cut into the rate limit.
time.sleep(5)
- output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = u'\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
+ # Also, build a milestone (google code sees it as a label)
+ milestone = None
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
+ if label.startswith('Milestone-'):
+ milestone = label.replace('Milestone-', '')
+ continue
if label.startswith('Priority-') and options.omit_priority:
continue
if not label:
continue
labels.append(LABEL_MAPPING.get(label, label))
if options.migrate_stars and 'Stars' in issue_summary:
labels.append(stars_to_label(issue_summary['Stars']))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
+ issue['milestone'] = milestone
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
- connection = opener.open(issue['link'])
+ # Missing issues may still exist in csv; make sure link is good
+ try:
+ connection = opener.open(issue['link'])
+ except urllib2.HTTPError:
+ return None
encoding = connection.headers['content-type'].split('charset=')[-1]
# Pass "ignore" so malformed page data doesn't abort us
doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue numbers to already-migrated
    Github issue objects; those are updated (comments/state) rather than
    recreated.

    Fixes applied here: stray diff markers ('+' prefixes) that had leaked
    into the source were removed, and the dummy issue created for a missing
    Google Code id is now recorded under that id (the old code keyed every
    dummy under the constant previous_gid, overwriting one entry).
    """
    issues = get_gcode_issues()
    previous_gid = 1
    if options.start_at is not None:
        issues = [x for x in issues if int(x['ID']) >= options.start_at]
        previous_gid = options.start_at - 1
        output('Starting at issue %d\n' % options.start_at)
    for issue in issues:
        issue = get_gcode_issue(issue)
        # A problem occurred getting issue information from the URL;
        # the issue may have been deleted on Google Code, so skip it.
        if issue is None:
            continue
        if options.skip_closed and (issue['state'] == 'closed'):
            continue
        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # Record the dummy under the gap id it fills.
                existing_issues[gid] = github_issue
        previous_gid = issue['gid']
        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)
        if github_issue:
            add_comments_to_issue(github_issue, issue)
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
-
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    """
    output("Retrieving existing Github issues...\n")
    id_pattern = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        all_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        total = len(all_issues)
        migrated = {}
        for gh_issue in all_issues:
            # Migrated issues embed the original Google Code URL in the body.
            match = id_pattern.search(gh_issue.body)
            if match is None:
                continue
            gcode_id = int(match.group(1))
            migrated[gcode_id] = gh_issue
            label_names = [l.name for l in gh_issue.get_labels()]
            if 'imported' not in label_names:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', gcode_id, label_names, gh_issue.title)
        logging.info('Found %d Github issues, %d imported', total, len(migrated))
    except:
        # Deliberate catch-all: log for diagnosis, then re-raise unchanged.
        logging.error('Failed to enumerate existing issues')
        raise
    return migrated
def log_rate_info():
    # Log how many rate-limited Github API requests remain out of the total.
    logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)
+ parser.add_option('--verbose', action = 'store_true', dest = 'verbose', help = 'Print more detailed information for each add.', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
+ milestone_cache = {}
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
ae0d5c5d739c18dadf3742dc627b0d2e52e79445
|
Change logging level back to ERROR
|
diff --git a/migrateissues.py b/migrateissues.py
index a6affa7..c71f999 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,449 +1,449 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
-logging.basicConfig(level = logging.INFO)
+logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def stars_to_label(stars):
    '''Return a label string corresponding to a star range.

    For example, '1' -> '1 star', '2' -> '2-5 stars', etc.
    Accepts either an int or a numeric string (the CSV feed gives strings).

    Fix: the range labels contained mojibake ('2â5 stars') where the
    en dash had been double-encoded; restored to '2–5 stars' etc.
    '''
    stars = int(stars)
    if stars == 1:
        return '1 star'
    elif stars <= 5:
        return '2–5 stars'
    elif stars <= 10:
        return '6–10 stars'
    elif stars <= 20:
        return '11–20 stars'
    else:
        return '21+ stars'
def output(string):
    """Write *string* to stdout and flush so progress shows immediately."""
    stream = sys.stdout
    stream.write(string)
    stream.flush()
def escape(s):
    """Process text to convert markup and escape things which need escaping.

    Returns ``s`` unchanged when it is empty or None.

    Fix: the body read ``s.replace('%', '%')`` — a no-op that contradicts
    its own comment; the HTML entity had been flattened during extraction.
    Percent signs are now replaced with ``&#37;`` as intended.
    """
    if s:
        s = s.replace('%', '&#37;')  # Escape % signs
    return s
def transform_to_markdown_compliant(string):
    """Rewrite Google Code issue text so Github's markdown renders it safely."""
    # Ordered (pattern, replacement) rewrites; order matters because the
    # later issue-link rules must run after the escaping rules.
    rules = [
        # Escape chars interpreted as markdown formatting by GH
        (r'(\s)~~', r'\1\\~~'),
        (r'\n(\s*)>', r'\n\1\\>'),
        (r'\n(\s*)#', r'\n\1\\#'),
        (r'(?m)^-([- \r]*)$', r'\\-\1'),
        # '==' is also making headers, but can't nicely escape ('\' shows up)
        (r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2'),
        # Escape < to avoid being treated as an html tag
        (r'(\s)<', r'\1\\<'),
        # Avoid links that should not be links.
        # I can find no way to escape the # w/o using backtics:
        (r'(\s+)(#\d+)(\W)', r'\1`\2`\3'),
        # Create issue links
        (r'\bi#(\d+)', r'issue #\1'),
        (r'\bissue (\d+)', r'issue #\1'),
    ]
    for pattern, replacement in rules:
        string = re.sub(pattern, replacement, string)
    return string
def github_label(name, color = "FFFFFF"):
    """ Returns the Github label with the given name, creating it if necessary. """
    # Cache hits skip the API entirely; a miss tries to fetch the label and
    # falls back to creating it when the repo doesn't have it yet.
    if name not in label_cache:
        try:
            label_cache[name] = github_repo.get_label(name)
        except GithubException:
            label_cache[name] = github_repo.create_label(name, color)
    return label_cache[name]
def parse_gcode_date(date_text):
    """ Transforms a Google Code date into a more human readable string. """
    gcode_format = '%a %b %d %H:%M:%S %Y'
    try:
        moment = datetime.strptime(date_text, gcode_format)
    except ValueError:
        # Not in the expected format; pass the raw text through unchanged.
        return date_text
    return moment.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
    """ Migrates the given Google Code issue to Github.

    Returns the created Github issue object, or None on a dry run.

    Fixes: the abort message read "Aborting to to impending" (typo), and
    the % escape was a no-op ``replace('%', '%')`` — the HTML entity had
    been flattened; restored to ``&#37;``.
    """
    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        raise Exception('Aborting due to impending Github API rate-limit cutoff.')
    body = issue['content'].replace('%', '&#37;')  # Escape % signs
    output('Adding issue %d' % issue['gid'])
    github_issue = None
    if not options.dry_run:
        github_labels = [github_label(label) for label in issue['labels']]
        text = body.encode('utf-8')
        text = transform_to_markdown_compliant(text)
        github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
    # Assigns issues that originally had an owner to the current user
    if issue['owner'] and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)
    return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
    """ Migrates all comments from a Google Code issue to its Github copy.

    Comments whose transformed body already exists verbatim on the Github
    issue are skipped, so re-running the script does not duplicate them.
    """
    # Retrieve existing Github comments, to figure out which Google Code comments are new
    existing_comments = [comment.body for comment in github_issue.get_comments()]
    # Add any remaining comments to the Github issue
    output(", adding comments")
    for i, comment in enumerate(gcode_issue['comments']):
        body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
        topost = transform_to_markdown_compliant(body)
        if topost in existing_comments:
            logging.info('Skipping comment %d: already present', i + 1)
        else:
            logging.info('Adding comment %d', i + 1)
            if not options.dry_run:
                topost = topost.encode('utf-8')
                github_issue.create_comment(topost)
                # We use a delay to avoid comments being created on GitHub
                # in the wrong order, due to network non-determinism.
                # Without this delay, I consistently observed a full 1 in 3
                # GoogleCode issue comments being reordered.
                # XXX: querying GitHub in a loop to see when the comment has
                # been posted may be faster, but will cut into the rate limit.
                time.sleep(5)
            output('.')
def get_attachments(link, attachments):
    """Render an issue's attachments as a markdown fragment.

    Returns '' when *attachments* is empty/None; otherwise a text blob of
    '**Attachment:** [name](link)' entries.
    """
    if not attachments:
        return ''
    parts = [u'\n\n']
    for raw in attachments:
        attachment = pq(raw)
        if not attachment('a'):  # Skip deleted attachments
            continue
        # Linking to the comment with the attachment rather than the
        # attachment itself since Google Code uses download tokens for
        # attachments
        parts.append(u'**Attachment:** [{}]({})'.format(attachment('b').text(), link))
    return u''.join(parts)
def get_gcode_issue(issue_summary):
    """Build a full issue dict for one Google Code issue.

    *issue_summary* is one CSV row from get_gcode_issues(). Metadata comes
    from the row; the body and comments are scraped from the issue's HTML
    detail page. Returns a dict with keys: gid, title, link, owner, state,
    date, status, labels, author, content, comments.
    """
    def get_author(doc):
        # Render the Google Code user link as a markdown link.
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))

    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        # NOTE(review): this replace is a no-op; presumably it should escape
        # '%' (cf. escape()) — confirm against the original source.
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        if not label:
            continue
        labels.append(LABEL_MAPPING.get(label, label))
    if options.migrate_stars and 'Stars' in issue_summary:
        labels.append(stars_to_label(issue_summary['Stars']))
    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])
    issue['labels'] = labels

    # Scrape the issue details page for the issue body and comments
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        # Authenticated requests get unmangled user names back.
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    connection = opener.open(issue['link'])
    encoding = connection.headers['content-type'].split('charset=')[-1]
    # Pass "ignore" so malformed page data doesn't abort us
    doc = pq(connection.read().decode(encoding, "ignore"))

    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)

    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                # Ellipses mark the continuation across split posts.
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())

    # The first "comment" is really the issue description; pop it back out.
    split_comment(issue, description('pre').text())
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
        content = issue['comments'].pop(0)['body'],
        footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
        attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
        **issue)

    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments
        date = parse_gcode_date(comment('.date').attr('title'))
        body = comment('pre').text()
        author = get_author(comment)
        updates = comment('.updates .box-inner')
        if updates:
            # Convert the status/label update box to markdown bold text.
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        split_comment({'date': date, 'author': author}, body)
    return issue
def get_gcode_issues():
    """Download the complete issue summary list via Google Code's CSV feed.

    The feed is paged; a trailing row whose ID contains 'truncated' signals
    that more pages remain.
    """
    count = 100
    start_index = 0
    issues = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
        issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
        if issues and 'truncated' in issues[-1]['ID']:
            # Drop the sentinel row and fetch the next page.
            issues.pop()
            start_index += count
        else:
            return issues
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue numbers to already-migrated
    Github issue objects; those are updated (comments/state) rather than
    recreated.
    """
    issues = get_gcode_issues()
    previous_gid = 1
    if options.start_at is not None:
        # Resume support: drop everything below the requested id.
        issues = [x for x in issues if int(x['ID']) >= options.start_at]
        previous_gid = options.start_at - 1
        output('Starting at issue %d\n' % options.start_at)
    for issue in issues:
        issue = get_gcode_issue(issue)
        if options.skip_closed and (issue['state'] == 'closed'):
            continue
        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # NOTE(review): previous_gid is constant across this inner loop,
                # so every dummy overwrites the same key — looks like it should
                # be existing_issues[gid]; confirm before relying on it.
                existing_issues[previous_gid] = github_issue
        previous_gid = issue['gid']
        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)
        if github_issue:
            add_comments_to_issue(github_issue, issue)
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    """
    output("Retrieving existing Github issues...\n")
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        # Fetch open and closed separately; the API has no single 'all' listing here.
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            # Migrated issues embed the original Google Code URL in their body.
            id_match = id_re.search(issue.body)
            if not id_match:
                continue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if not 'imported' in labels:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
    except:
        # Deliberate catch-all: log for diagnosis, then re-raise unchanged.
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    # Log how many rate-limited Github API requests remain out of the total.
    logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
fbaa16d0546df02d4132ec8d1544aff2ab7293f3
|
Add hint about orgname/project
|
diff --git a/README.md b/README.md
index 6c43d65..06853a8 100644
--- a/README.md
+++ b/README.md
@@ -1,97 +1,98 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
Github's API does not support creating issues or adding comments without
notifying everyone watching the repository. As a result, running this script
targeting an existing repository with watchers who do not want to receive a
very large number of emails is probably not a good idea.
I do not know of any way around this other than deleting and recreating the
repository immediately before running the import.
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
+ For Organizations, use orgname/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
--skip-closed Skip all closed bugs
--start-at Start at the given Google Code issue number
--migrate-stars Migrate binned star counts as labels
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priority` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repository.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
`--skip-closed` will skip migrating issues that were closed.
`--start-at` will skip migrating issues with Google Code issue numbers less than
the provided value.
`--migrate-stars` will migrate the 'Stars' count on each Google Code issue to
Github labels. The following mapping is used:
* `Stars == 1`: Label '1 star'
* `Stars <= 5`: Label '2–5 stars'
* `Stars <= 10`: Label '6–10 stars'
* `Stars <= 20`: Label '11–20 stars'
* `Stars >= 21`: Label '21+ stars'
|
arthur-debert/google-code-issues-migrator
|
f39f28c53fc69853c6eb04847a0127a825a6a6ef
|
Use correct '21+ stars' label when Stars >= 21.
|
diff --git a/migrateissues.py b/migrateissues.py
index dcf3079..a6affa7 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,449 +1,449 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.INFO)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def stars_to_label(stars):
    '''Return a label string corresponding to a star range.

    For example, '1' -> '1 star', '2' -> '2-5 stars', etc.
    Accepts either an int or a numeric string (the CSV feed gives strings).

    Fixes: a leaked diff hunk ('- return ...' / '+ return ...') was resolved
    to its post-image ('21+ stars'), and the mojibake 'â' in the range
    labels was restored to the intended en dash.
    '''
    stars = int(stars)
    if stars == 1:
        return '1 star'
    elif stars <= 5:
        return '2–5 stars'
    elif stars <= 10:
        return '6–10 stars'
    elif stars <= 20:
        return '11–20 stars'
    else:
        return '21+ stars'
def output(string):
    # Emit progress text immediately: no buffering, no implicit newline.
    sys.stdout.write(string)
    sys.stdout.flush()
def escape(s):
    """Process text to convert markup and escape things which need escaping.

    Returns ``s`` unchanged when it is empty or None.

    Fix: the body read ``s.replace('%', '%')`` — a no-op that contradicts
    its own comment; percent signs are now replaced with the HTML entity
    ``&#37;`` as intended.
    """
    if s:
        s = s.replace('%', '&#37;')  # Escape % signs
    return s
def transform_to_markdown_compliant(string):
    """Rewrite Google Code issue text so Github's markdown renders it safely.

    Escapes characters Github would treat as formatting (~~, >, #, -, ==, <),
    backticks bare #NN tokens so they don't auto-link, and turns
    'issue NN' / 'i#NN' references into Github issue links.
    """
    # Escape chars interpreted as markdown formatting by GH
    string = re.sub(r'(\s)~~', r'\1\\~~', string)
    string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
    string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
    string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
    # '==' is also making headers, but can't nicely escape ('\' shows up)
    string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
    # Escape < to avoid being treated as an html tag
    string = re.sub(r'(\s)<', r'\1\\<', string)
    # Avoid links that should not be links.
    # I can find no way to escape the # w/o using backtics:
    string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
    # Create issue links
    string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
    string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
    return string
def github_label(name, color = "FFFFFF"):
    """ Returns the Github label with the given name, creating it if necessary. """
    # label_cache avoids a Github API round-trip for labels already seen.
    try:
        return label_cache[name]
    except KeyError:
        try:
            return label_cache.setdefault(name, github_repo.get_label(name))
        except GithubException:
            # Label doesn't exist on the repo yet; create it on the fly.
            return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
    """ Transforms a Google Code date into a more human readable string. """
    source_format = '%a %b %d %H:%M:%S %Y'
    try:
        when = datetime.strptime(date_text, source_format)
    except ValueError:
        # Unrecognised format: hand the original text back untouched.
        return date_text
    return when.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
    """ Migrates the given Google Code issue to Github.

    Returns the created Github issue object, or None on a dry run.

    Fixes: abort message typo ("Aborting to to") and the no-op
    ``replace('%', '%')`` restored to the HTML entity ``&#37;``.
    """
    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        raise Exception('Aborting due to impending Github API rate-limit cutoff.')
    body = issue['content'].replace('%', '&#37;')  # Escape % signs
    output('Adding issue %d' % issue['gid'])
    github_issue = None
    if not options.dry_run:
        github_labels = [github_label(label) for label in issue['labels']]
        text = body.encode('utf-8')
        text = transform_to_markdown_compliant(text)
        github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
    # Assigns issues that originally had an owner to the current user
    if issue['owner'] and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)
    return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
    """ Migrates all comments from a Google Code issue to its Github copy.

    Comments whose transformed body already exists verbatim on the Github
    issue are skipped, so re-running the script does not duplicate them.
    """
    # Retrieve existing Github comments, to figure out which Google Code comments are new
    existing_comments = [comment.body for comment in github_issue.get_comments()]
    # Add any remaining comments to the Github issue
    output(", adding comments")
    for i, comment in enumerate(gcode_issue['comments']):
        body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
        topost = transform_to_markdown_compliant(body)
        if topost in existing_comments:
            logging.info('Skipping comment %d: already present', i + 1)
        else:
            logging.info('Adding comment %d', i + 1)
            if not options.dry_run:
                topost = topost.encode('utf-8')
                github_issue.create_comment(topost)
                # We use a delay to avoid comments being created on GitHub
                # in the wrong order, due to network non-determinism.
                # Without this delay, I consistently observed a full 1 in 3
                # GoogleCode issue comments being reordered.
                # XXX: querying GitHub in a loop to see when the comment has
                # been posted may be faster, but will cut into the rate limit.
                time.sleep(5)
            output('.')
def get_attachments(link, attachments):
    """Format an issue's attachment list as markdown; '' when there are none."""
    if not attachments:
        return ''
    rendered = u'\n\n'
    for raw in attachments:
        node = pq(raw)
        if not node('a'):
            # Deleted attachments have no anchor element; skip them.
            continue
        # Linking to the comment with the attachment rather than the
        # attachment itself since Google Code uses download tokens for
        # attachments
        rendered += u'**Attachment:** [{}]({})'.format(node('b').text(), link)
    return rendered
def get_gcode_issue(issue_summary):
    """ Builds a complete issue dict from one Google Code CSV summary row.

    Combines the fields available in the CSV export with data scraped from
    the issue's HTML detail page: description, author, comments,
    attachments and update boxes. The returned dict carries the keys used
    by the migration ('gid', 'title', 'link', 'owner', 'state', 'date',
    'status', 'labels', 'author', 'content', 'comments').
    """
    def get_author(doc):
        # Render the author as a markdown link to their Google Code profile.
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))

    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        # NOTE(review): this replace() is a no-op as written; it looks like
        # it was meant to escape '%' characters -- confirm against history.
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        if not label:
            continue
        # Translate well-known Google Code labels to Github equivalents.
        labels.append(LABEL_MAPPING.get(label, label))
    # Optionally bin the star count into a label (see stars_to_label).
    if options.migrate_stars and 'Stars' in issue_summary:
        labels.append(stars_to_label(issue_summary['Stars']))

    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])

    issue['labels'] = labels

    # Scrape the issue details page for the issue body and comments
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        # Authenticated requests return unmangled user names.
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    connection = opener.open(issue['link'])
    encoding = connection.headers['content-type'].split('charset=')[-1]
    # Pass "ignore" so malformed page data doesn't abort us
    doc = pq(connection.read().decode(encoding, "ignore"))

    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)

    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                # Mark both halves so readers see the comment continues.
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())

    split_comment(issue, description('pre').text())
    # The first "comment" is really the issue description; fold it into the
    # issue body along with attachments and a footer linking back to the
    # original Google Code issue (the footer is also how later runs detect
    # already-migrated issues).
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
            content = issue['comments'].pop(0)['body'],
            footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
            attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
            **issue)

    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments

        date = parse_gcode_date(comment('.date').attr('title'))
        body = comment('pre').text()
        author = get_author(comment)

        updates = comment('.updates .box-inner')
        if updates:
            # Render the "updates" box (status/label/owner changes) as markdown.
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')

        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))

        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        split_comment({'date': date, 'author': author}, body)

    return issue
def get_gcode_issues():
    """ Fetches every issue summary row from the Google Code CSV export.

    Pages through the export in fixed-size batches until the feed stops
    reporting truncation, and returns the accumulated list of CSV rows.
    """
    page_size = 100
    offset = 0
    rows = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, page_size, offset)
        reader = csv.DictReader(urllib2.urlopen(url), dialect=csv.excel)
        rows.extend(reader)
        # When more results remain, Google Code appends a sentinel row whose
        # ID field mentions 'truncated'; drop it and fetch the next page.
        if rows and 'truncated' in rows[-1]['ID']:
            rows.pop()
            offset += page_size
        else:
            return rows
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue numbers to Github issue objects
    migrated by a previous run; those entries are updated (comments, state)
    rather than recreated.
    """
    issues = get_gcode_issues()
    previous_gid = 1

    if options.start_at is not None:
        issues = [x for x in issues if int(x['ID']) >= options.start_at]
        previous_gid = options.start_at - 1
        output('Starting at issue %d\n' % options.start_at)

    # --only restricts the migration to an explicit set of issue numbers.
    # getattr() keeps this backward-compatible with drivers that never
    # define the option.
    only_issues = getattr(options, 'only', None)
    if only_issues:
        issues = [x for x in issues if int(x['ID']) in only_issues]
        output('Migrating only %d selected issues\n' % len(issues))

    for issue in issues:
        issue = get_gcode_issue(issue)
        if options.skip_closed and (issue['state'] == 'closed'):
            continue

        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                # BUG FIX: dummies were previously created on Github even
                # under --dry-run; respect the flag like everywhere else.
                if not options.dry_run:
                    github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                    github_issue.edit(state = 'closed')
                    # BUG FIX: the dummy was stored under previous_gid,
                    # clobbering an unrelated map entry; key it by the gid
                    # it actually fills so later gap checks see it.
                    existing_issues[gid] = github_issue
        previous_gid = issue['gid']

        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)

        if github_issue:
            add_comments_to_issue(github_issue, issue)
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    Migrated issues are recognized by the footer link added at creation
    time (matched via GOOGLE_ID_RE).
    """
    output("Retrieving existing Github issues...\n")
    id_pattern = re.compile(GOOGLE_ID_RE % google_project_name)
    issue_map = {}
    try:
        open_issues = list(github_repo.get_issues(state='open'))
        closed_issues = list(github_repo.get_issues(state='closed'))
        all_issues = open_issues + closed_issues
        for gh_issue in all_issues:
            id_match = id_pattern.search(gh_issue.body)
            if id_match is None:
                continue
            google_id = int(id_match.group(1))
            issue_map[google_id] = gh_issue
            label_names = [label.name for label in gh_issue.get_labels()]
            if 'imported' not in label_names:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', google_id, label_names, gh_issue.title)
        logging.info('Found %d Github issues, %d imported', len(all_issues), len(issue_map))
    except:
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """ Logs the current Github API rate-limit status (remaining/total). """
    remaining_and_total = github.rate_limiting
    logging.info('Rate limit (remaining/total) %r', remaining_and_total)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
bb826d045e77be38a192599de13210eb8201785b
|
Document --migrate-stars option
|
diff --git a/README.md b/README.md
index 7588ec3..6c43d65 100644
--- a/README.md
+++ b/README.md
@@ -1,88 +1,97 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
Github's API does not support creating issues or adding comments without
notifying everyone watching the repository. As a result, running this script
targetting an existing repository with watchers who do not want to recieve a
very large number of emails is probably not a good idea.
I do not know of any way around this other than deleting and recreating the
repository immediately before running the import.
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
--skip-closed Skip all closed bugs
--start-at Start at the given Google Code issue number
-
- You will be prompted for your github password.
+ --migrate-stars Migrate binned star counts as labels
+
+ You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repistory.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
`--skip-closed` will skip migrating issues that were closed.
`--start-at` will skip migrating issues with Google Code issue numbers less than
the provided value.
+
+`--migrate-stars` will migrate the 'Stars' count on each Google Code issue to
+Github labels. The following mapping is used:
+* `Stars == 1`: Label '1 star'
+* `Stars <= 5`: Label '2â5 stars'
+* `Stars <= 10`: Label '6â10 stars'
+* `Stars <= 20`: Label '11â20 stars'
+* `Stars >= 21`: Label '21+ stars'
|
arthur-debert/google-code-issues-migrator
|
5dd197726d0a7288cb3fa044967c557e4b45c3d2
|
Disable 5-second delay between comment postings when --dry-run is specified.
|
diff --git a/migrateissues.py b/migrateissues.py
index dc64b51..38b32be 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,426 +1,427 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.INFO)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def transform_to_markdown_compliant(string):
# Escape chars interpreted as markdown formatting by GH
string = re.sub(r'(\s)~~', r'\1\\~~', string)
string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
# '==' is also making headers, but can't nicely escape ('\' shows up)
string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
# Escape < to avoid being treated as an html tag
string = re.sub(r'(\s)<', r'\1\\<', string)
# Avoid links that should not be links.
# I can find no way to escape the # w/o using backtics:
string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
# Create issue links
string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
return string
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
text = body.encode('utf-8')
text = transform_to_markdown_compliant(text)
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
topost = transform_to_markdown_compliant(body)
if topost in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
topost = topost.encode('utf-8')
github_issue.create_comment(topost)
+
+ # We use a delay to avoid comments being created on GitHub
+ # in the wrong order, due to network non-determinism.
+ # Without this delay, I consistently observed a full 1 in 3
+ # GoogleCode issue comments being reordered.
+ # XXX: querying GitHub in a loop to see when the comment has
+ # been posted may be faster, but will cut into the rate limit.
+ time.sleep(5)
output('.')
- # We use a delay to avoid comments being created on GitHub
- # in the wrong order, due to network non-determinism.
- # Without this delay, I consistently observed a full 1 in 3
- # GoogleCode issue comments being reordered.
- # XXX: querying GitHub in a loop to see when the comment has
- # been posted may be faster, but will cut into the rate limit.
- time.sleep(5)
def get_attachments(link, attachments):
if not attachments:
return ''
body = u'\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
if not label:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
connection = opener.open(issue['link'])
encoding = connection.headers['content-type'].split('charset=')[-1]
# Pass "ignore" so malformed page data doesn't abort us
doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
if options.start_at is not None:
issues = [x for x in issues if int(x['ID']) >= options.start_at]
previous_gid = options.start_at - 1
output('Starting at issue %d\n' % options.start_at)
for issue in issues:
issue = get_gcode_issue(issue)
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
19970f730a891e28451ad5c217bd5dacdce335b9
|
Document --start-at option
|
diff --git a/README.md b/README.md
index 6d4adbf..7588ec3 100644
--- a/README.md
+++ b/README.md
@@ -1,84 +1,88 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
Github's API does not support creating issues or adding comments without
notifying everyone watching the repository. As a result, running this script
targetting an existing repository with watchers who do not want to recieve a
very large number of emails is probably not a good idea.
I do not know of any way around this other than deleting and recreating the
repository immediately before running the import.
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
--skip-closed Skip all closed bugs
+ --start-at Start at the given Google Code issue number
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repistory.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
`--skip-closed` will skip migrating issues that were closed.
+
+`--start-at` will skip migrating issues with Google Code issue numbers less than
+the provided value.
|
arthur-debert/google-code-issues-migrator
|
c90457305e4486761d5fd1d093b0503da76b314f
|
Change default logging to INFO to give a much better idea of the progress of the migration.
|
diff --git a/migrateissues.py b/migrateissues.py
index e8ce101..d1245b0 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,418 +1,418 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
-logging.basicConfig(level = logging.ERROR)
+logging.basicConfig(level = logging.INFO)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def transform_to_markdown_compliant(string):
# Escape chars interpreted as markdown formatting by GH
string = re.sub(r'(\s)~~', r'\1\\~~', string)
string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
# '==' is also making headers, but can't nicely escape ('\' shows up)
string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
# Escape < to avoid being treated as an html tag
string = re.sub(r'(\s)<', r'\1\\<', string)
# Avoid links that should not be links.
# I can find no way to escape the # w/o using backtics:
string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
# Create issue links
string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
return string
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
text = body.encode('utf-8')
text = transform_to_markdown_compliant(text)
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
topost = transform_to_markdown_compliant(body)
if topost in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
topost = topost.encode('utf-8')
github_issue.create_comment(topost)
output('.')
# We use a delay to avoid comments being created on GitHub
# in the wrong order, due to network non-determinism.
# Without this delay, I consistently observed a full 1 in 3
# GoogleCode issue comments being reordered.
# XXX: querying GitHub in a loop to see when the comment has
# been posted may be faster, but will cut into the rate limit.
time.sleep(5)
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
connection = opener.open(issue['link'])
encoding = connection.headers['content-type'].split('charset=')[-1]
# Pass "ignore" so malformed page data doesn't abort us
doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
b9074c4ee557c2b3b927be8adcc6a71a25c8b72b
|
Add 5 second delay between each comment to reduce reorderings. Without this delay, a full 1 in 3 comments migrated from GoogleCode to GitHub is out of order with respect to other comments on the same issue!
|
diff --git a/migrateissues.py b/migrateissues.py
index 5a3d2e5..e8ce101 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,411 +1,418 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
+import time
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def transform_to_markdown_compliant(string):
# Escape chars interpreted as markdown formatting by GH
string = re.sub(r'(\s)~~', r'\1\\~~', string)
string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
# '==' is also making headers, but can't nicely escape ('\' shows up)
string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
# Escape < to avoid being treated as an html tag
string = re.sub(r'(\s)<', r'\1\\<', string)
# Avoid links that should not be links.
# I can find no way to escape the # w/o using backtics:
string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
# Create issue links
string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
return string
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
text = body.encode('utf-8')
text = transform_to_markdown_compliant(text)
github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
topost = transform_to_markdown_compliant(body)
if topost in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
topost = topost.encode('utf-8')
github_issue.create_comment(topost)
output('.')
-
+ # We use a delay to avoid comments being created on GitHub
+ # in the wrong order, due to network non-determinism.
+ # Without this delay, I consistently observed a full 1 in 3
+ # GoogleCode issue comments being reordered.
+ # XXX: querying GitHub in a loop to see when the comment has
+ # been posted may be faster, but will cut into the rate limit.
+ time.sleep(5)
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
connection = opener.open(issue['link'])
encoding = connection.headers['content-type'].split('charset=')[-1]
# Pass "ignore" so malformed page data doesn't abort us
doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
f1f232dc87a31f2cb12261da9e5da633e558e0a0
|
Escape Markdown characters and convert cross-issue links.
|
diff --git a/migrateissues.py b/migrateissues.py
index 4ecd0e3..5a3d2e5 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,389 +1,411 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
+def transform_to_markdown_compliant(string):
+ # Escape chars interpreted as markdown formatting by GH
+ string = re.sub(r'(\s)~~', r'\1\\~~', string)
+ string = re.sub(r'\n(\s*)>', r'\n\1\\>', string)
+ string = re.sub(r'\n(\s*)#', r'\n\1\\#', string)
+ string = re.sub(r'(?m)^-([- \r]*)$', r'\\-\1', string)
+ # '==' is also making headers, but can't nicely escape ('\' shows up)
+ string = re.sub(r'(\S\s*\n)(=[= ]*(\r?\n|$))', r'\1\n\2', string)
+ # Escape < to avoid being treated as an html tag
+ string = re.sub(r'(\s)<', r'\1\\<', string)
+ # Avoid links that should not be links.
+ # I can find no way to escape the # w/o using backtics:
+ string = re.sub(r'(\s+)(#\d+)(\W)', r'\1`\2`\3', string)
+ # Create issue links
+ string = re.sub(r'\bi#(\d+)', r'issue #\1', string)
+ string = re.sub(r'\bissue (\d+)', r'issue #\1', string)
+ return string
+
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
    """Reformat a Google Code timestamp into a friendlier string.

    Input like 'Mon Jan 02 03:04:05 2012' becomes 'January 02, 2012 03:04:05'.
    Text that doesn't match the expected format is returned unchanged.
    """
    try:
        moment = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
    except ValueError:
        # Not in Google Code's format -- pass it through as-is.
        return date_text
    return moment.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
    """Migrate the given Google Code issue to Github.

    Creates the Github issue (unless --dry-run), applying the mapped labels and
    transforming the body to Markdown-safe text. Returns the created Github
    issue object, or None in dry-run mode.

    (Fix: the original block contained unresolved unified-diff '+'/'-' marker
    lines; this resolves them to the new '+' side. Also fixed the 'to to' typo
    in the abort message.)
    """
    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        raise Exception('Aborting due to impending Github API rate-limit cutoff.')
    body = issue['content'].replace('%', '%')
    output('Adding issue %d' % issue['gid'])
    github_issue = None
    if not options.dry_run:
        github_labels = [github_label(label) for label in issue['labels']]
        text = body.encode('utf-8')
        text = transform_to_markdown_compliant(text)
        github_issue = github_repo.create_issue(issue['title'], body = text, labels = github_labels)
    # Assigns issues that originally had an owner to the current user
    if issue['owner'] and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)
    return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
    """Migrate all comments from a Google Code issue to its Github copy.

    Comments whose Markdown-transformed body already exists on the Github issue
    are skipped, making the migration idempotent across re-runs.

    (Fix: the original block contained unresolved unified-diff '+'/'-' marker
    lines; this resolves them to the new '+' side.)
    """
    # Retrieve existing Github comments, to figure out which Google Code comments are new
    existing_comments = [comment.body for comment in github_issue.get_comments()]
    # Add any remaining comments to the Github issue
    output(", adding comments")
    for i, comment in enumerate(gcode_issue['comments']):
        body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
        # Compare the transformed text, since that's what was actually posted.
        topost = transform_to_markdown_compliant(body)
        if topost in existing_comments:
            logging.info('Skipping comment %d: already present', i + 1)
        else:
            logging.info('Adding comment %d', i + 1)
            if not options.dry_run:
                topost = topost.encode('utf-8')
                github_issue.create_comment(topost)
            output('.')
def get_attachments(link, attachments):
    """Render a Markdown snippet listing the given attachments.

    Each attachment becomes a bold link back to *link* (the comment's URL).
    Returns the empty string when there are no attachments.
    """
    if not attachments:
        return ''
    parts = ['\n\n']
    for node in attachments:
        attachment = pq(node)
        if not attachment('a'): # Skip deleted attachments
            continue
        # Linking to the comment with the attachment rather than the
        # attachment itself since Google Code uses download tokens for
        # attachments
        parts.append('**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link))
    return ''.join(parts)
def get_gcode_issue(issue_summary):
    """Build a full issue dict from a Google Code CSV summary row.

    Combines the summary fields with data scraped from the issue's detail page:
    author, body content (with attachments and a provenance footer), labels,
    and the full comment stream (split into <=7000-char chunks for Github).
    """
    def get_author(doc):
        # Render the author as a Markdown link to their Google Code profile.
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        # NOTE(review): this replace is a no-op ('%' -> '%'); likely mangled
        # escape text -- confirm intent (see escape() above).
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }
    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        labels.append(LABEL_MAPPING.get(label, label))
    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])
    issue['labels'] = labels
    # Scrape the issue details page for the issue body and comments
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    connection = opener.open(issue['link'])
    # Derive the charset from the Content-Type header (falls back to the whole
    # header value if no 'charset=' is present).
    encoding = connection.headers['content-type'].split('charset=')[-1]
    # Pass "ignore" so malformed page data doesn't abort us
    doc = pq(connection.read().decode(encoding, "ignore"))
    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)
    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                # Mark both sides of the split with ellipses so readers know
                # the chunks are continuations.
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())
    split_comment(issue, description('pre').text())
    # The first "comment" is really the issue body; pop it off and format it
    # with a provenance header and footer linking back to Google Code.
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
        content = issue['comments'].pop(0)['body'],
        footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
        attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
        **issue)
    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments
        date = parse_gcode_date(comment('.date').attr('title'))
        body = comment('pre').text()
        author = get_author(comment)
        # Metadata changes (status/label/owner updates) shown with the comment.
        updates = comment('.updates .box-inner')
        if updates:
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        split_comment({'date': date, 'author': author}, body)
    return issue
def get_gcode_issues():
    """Download the project's complete issue list from Google Code.

    Pages through the CSV export 100 rows at a time (a trailing 'truncated'
    sentinel row signals more pages) and returns all rows as a list of dicts.
    """
    page_size = 100
    offset = 0
    rows = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, page_size, offset)
        for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel):
            rows.append(row)
        if rows and 'truncated' in rows[-1]['ID']:
            # Drop the sentinel row and fetch the next page.
            rows.pop()
            offset += page_size
        else:
            return rows
def process_gcode_issues(existing_issues):
    """Migrate all Google Code issues to Github.

    ``existing_issues`` maps Google Code issue ids to already-migrated Github
    issue objects; those are skipped (but still get any new comments). With
    --synchronize-ids, dummy closed issues are created for gaps in the Google
    Code numbering so Github issue numbers stay aligned.
    """
    issues = get_gcode_issues()
    previous_gid = 1
    for issue in issues:
        issue = get_gcode_issue(issue)
        if options.skip_closed and (issue['state'] == 'closed'):
            continue
        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                existing_issues[previous_gid] = github_issue
            previous_gid = issue['gid']
        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)
        if github_issue:
            add_comments_to_issue(github_issue, issue)
            # Close (or reopen) the Github issue to match the Google Code state.
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.
    The result maps Google Code issue numbers to Github issue objects.
    """
    output("Retrieving existing Github issues...\n")
    # Migrated issues carry a footer linking back to Google Code; this regex
    # extracts the original Google Code issue id from that footer.
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            id_match = id_re.search(issue.body)
            if not id_match:
                continue # Not a migrated issue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if not 'imported' in labels:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
    except:
        # Log for context, then re-raise so the caller aborts the migration.
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """Log Github's reported API quota as (remaining, total) at INFO level."""
    quota = github.rate_limiting
    logging.info('Rate limit (remaining/total) %r', quota)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
    # Command-line entry point: parse options, authenticate against Github,
    # resolve the target repository, then migrate all Google Code issues.
    usage = "usage: %prog [options] <google project name> <github username> <github project>"
    description = "Migrate all issues from a Google Code project to a Github project."
    parser = optparse.OptionParser(usage = usage, description = description)
    parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
    parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
    parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
    parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
    parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
    parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
    options, args = parser.parse_args()
    if len(args) != 3:
        parser.print_help()
        sys.exit()
    label_cache = {} # Cache Github tags, to avoid unnecessary API requests
    google_project_name, github_user_name, github_project = args
    # Prompt for the password until a login succeeds.
    while True:
        github_password = getpass.getpass("Github password: ")
        try:
            Github(github_user_name, github_password).get_user().login
            break
        # NOTE(review): BadCredentialsException is never imported in this file
        # (only Github and GithubException are) -- this handler would raise
        # NameError if credentials are actually bad. Needs
        # `from github import BadCredentialsException`.
        except BadCredentialsException:
            print "Bad credentials, try again."
    github = Github(github_user_name, github_password)
    log_rate_info()
    github_user = github.get_user()
    # If the project name is specified as owner/project, assume that it's owned by either
    # a different user than the one we have credentials for, or an organization.
    if "/" in github_project:
        owner_name, github_project = github_project.split("/")
        try:
            github_owner = github.get_user(owner_name)
        except GithubException:
            try:
                github_owner = github.get_organization(owner_name)
            except GithubException:
                # Fall back to the authenticated user if the owner can't be resolved.
                github_owner = github_user
    else:
        github_owner = github_user
    github_repo = github_owner.get_repo(github_project)
    try:
        existing_issues = get_existing_github_issues()
        log_rate_info()
        process_gcode_issues(existing_issues)
    except Exception:
        parser.print_help()
        raise
|
arthur-debert/google-code-issues-migrator
|
d68ff5f7cc3bbad444b790ecad0fbe6bcdf0b412
|
Pass "ignore" on decoding Google Code issue text to avoid bad characters (such as in https://code.google.com/p/drmemory/issues/detail?id=1289#c3) throwing an exception and aborting the migration.
|
diff --git a/migrateissues.py b/migrateissues.py
index 6eb5f8b..4ecd0e3 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,386 +1,389 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
- doc = pq(opener.open(issue['link']).read())
+ connection = opener.open(issue['link'])
+ encoding = connection.headers['content-type'].split('charset=')[-1]
+ # Pass "ignore" so malformed page data doesn't abort us
+ doc = pq(connection.read().decode(encoding, "ignore"))
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
e51608d637e0db2c0c795b4b0db5946e638b1a84
|
Add an option to migrate binned star counts
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0d20b64
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+*.pyc
diff --git a/migrateissues.py b/migrateissues.py
old mode 100644
new mode 100755
index 6eb5f8b..e0805d5
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,386 +1,408 @@
#!/usr/bin/env python
+# -*- coding: utf-8 -*-
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
-GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
+GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter%20Stars&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
+def stars_to_label(stars):
+ '''Return a label string corresponding to a star range.
+
+ For example, '1' -> '1 star', '2' -> '2-5 stars', etc.
+ '''
+ stars = int(stars)
+ if stars == 1:
+ return '1 star'
+ elif stars <= 5:
+ return '2â5 stars'
+ elif stars <= 10:
+ return '6â10 stars'
+ elif stars <= 20:
+ return '11â20 stars'
+ else:
+ return '25+ stars'
+
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
return body
def get_gcode_issue(issue_summary):
    """ Builds a complete issue dict from one row of the Google Code summary CSV.

    Populates the cheap fields straight from the CSV row, then scrapes the
    issue's detail page for the description, author, attachments and comments.
    Returns a dict with keys: gid, title, link, owner, state, date, status,
    labels, author, content and comments.

    Fix: removed stray diff '+' markers that had been left on the
    migrate-stars lines, which made the file syntactically invalid.
    """
    def get_author(doc):
        # Authors render as profile links; keep the name plus an absolute URL.
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))

    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        # NOTE(review): replace('%', '%') is a no-op today; presumably it once
        # escaped percent signs — confirm before removing.
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        labels.append(LABEL_MAPPING.get(label, label))
    # Optionally bin the star count into a label; the Stars column is only
    # present when the CSV colspec requested it.
    if options.migrate_stars and 'Stars' in issue_summary:
        labels.append(stars_to_label(issue_summary['Stars']))

    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])
    issue['labels'] = labels

    # Scrape the issue details page for the issue body and comments
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    doc = pq(opener.open(issue['link']).read())

    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)

    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())

    # The first "comment" is really the issue description; pop it back out
    # below to build the issue body.
    split_comment(issue, description('pre').text())
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
            content = issue['comments'].pop(0)['body'],
            footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
            attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
            **issue)

    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments

        date = parse_gcode_date(comment('.date').attr('title'))
        body = comment('pre').text()
        author = get_author(comment)
        updates = comment('.updates .box-inner')
        if updates:
            # Convert the status/label update box into Markdown bold + newlines.
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))

        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        split_comment({'date': date, 'author': author}, body)

    return issue
def get_gcode_issues():
    """ Downloads the full issue summary CSV from Google Code, page by page.

    Google Code truncates large result sets and marks truncation with a
    sentinel row whose ID column contains 'truncated'; keep fetching pages
    until that sentinel no longer appears. Returns a list of CSV row dicts.
    """
    page_size = 100
    offset = 0
    rows = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, page_size, offset)
        rows.extend(csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
        # No sentinel (or no rows at all) means we have everything.
        if not (rows and 'truncated' in rows[-1]['ID']):
            return rows
        rows.pop()
        offset += page_size
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue numbers to already-migrated Github
    issue objects (see get_existing_github_issues); issues already present are
    not re-created, but still get new comments and state reconciliation.
    """
    issues = get_gcode_issues()
    previous_gid = 1
    for issue in issues:
        # Replace the summary-CSV row with the fully scraped issue dict.
        issue = get_gcode_issue(issue)
        if options.skip_closed and (issue['state'] == 'closed'):
            continue

        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # NOTE(review): keyed by previous_gid, not gid — looks like it
                # should be existing_issues[gid]; confirm before relying on it.
                existing_issues[previous_gid] = github_issue
        previous_gid = issue['gid']

        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            # May be None on a dry run (see add_issue_to_github).
            github_issue = add_issue_to_github(issue)

        # Existing issues also get their comments topped up and their
        # open/closed state reconciled with Google Code's.
        if github_issue:
            add_comments_to_issue(github_issue, issue)
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    Issues are recognised as migrated by the "Original issue" footer that the
    migration appends to each issue body (GOOGLE_ID_RE).

    Fixes: bare `except:` narrowed to `except Exception:` so that
    KeyboardInterrupt/SystemExit propagate without being logged as failures,
    and deprecated logging.warn replaced by logging.warning.
    """
    output("Retrieving existing Github issues...\n")
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            # Only issues carrying the migration footer map back to a
            # Google Code id; anything else is a native Github issue.
            id_match = id_re.search(issue.body)
            if not id_match:
                continue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if not 'imported' in labels:
                # TODO we could fix up the label here instead of just warning
                logging.warning('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported', existing_count, imported_count)
    except Exception:
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """ Logs the Github API rate-limit status (remaining/total requests). """
    logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
+ parser.add_option('--migrate-stars', action = 'store_true', dest = 'migrate_stars', help = 'Migrate binned star counts as labels', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
5dee69737510ede38a78623b6b3acbbf4c739a14
|
To let the import proceed past comments whose actual text contains invalid characters, added a try/except block around the retrieval of the comment's body
|
diff --git a/migrateissues.py b/migrateissues.py
old mode 100644
new mode 100755
index 6eb5f8b..e1368d7
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,386 +1,390 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
- body = comment('pre').text()
+ try:
+ body = comment('pre').text()
+ except:
+ body = "SEE ISSUE ON GH!"
+ logging.error("Comment cannot be imported due to some error in comment")
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
ee09b9c6f8aa85623b1f12eda6c78e0852def4f9
|
Add a --start-at option to start at a particular google code issue number
|
diff --git a/migrateissues.py b/migrateissues.py
index 6eb5f8b..5b77a0f 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,386 +1,392 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
+ if options.start_at is not None:
+ issues = [x for x in issues if int(x['ID']) >= options.start_at]
+ previous_gid = options.start_at - 1
+ output('Starting at issue %d\n' % options.start_at)
+
for issue in issues:
issue = get_gcode_issue(issue)
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
    """ Logs the Github API rate-limit budget as (remaining, total). """
    remaining_and_total = github.rate_limiting
    logging.info('Rate limit (remaining/total) %r', remaining_and_total)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
+ parser.add_option('--start-at', dest = 'start_at', help = 'Start at the given Google Code issue number', default = None, type = int)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
3827dbe3b692b333d781c9be1e877b75bf2cc8b7
|
Don't try to add empty labels
|
diff --git a/migrateissues.py b/migrateissues.py
index 6eb5f8b..753eb34 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,386 +1,388 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
    """ Writes the given text to stdout and flushes immediately, so progress
    markers appear without waiting for a newline. """
    stream = sys.stdout
    stream.write(string)
    stream.flush()
def escape(s):
    """Process text to convert markup and escape things which need escaping"""
    if s:
        # Use the HTML entity so a literal % survives rendering. The previous
        # code replaced '%' with '%' — a no-op, almost certainly a mangled
        # copy of this entity escape (the comment already said "Escape % signs").
        s = s.replace('%', '&#37;') # Escape % signs
    return s
def github_label(name, color = "FFFFFF"):
    """ Returns the Github label with the given name, creating it if necessary.

    Results are memoized in the module-level label_cache so each label costs
    at most one (or two, on first creation) API round-trips.
    """
    if name in label_cache:
        return label_cache[name]
    try:
        label = github_repo.get_label(name)
    except GithubException:
        # The label does not exist on the repository yet; create it.
        label = github_repo.create_label(name, color)
    return label_cache.setdefault(name, label)
def parse_gcode_date(date_text):
    """ Transforms a Google Code date into a more human readable string.

    Input like 'Mon Jan 01 12:00:00 2007' becomes 'January 01, 2007 12:00:00';
    anything that does not match the expected format is returned unchanged.
    """
    source_format = '%a %b %d %H:%M:%S %Y'
    display_format = '%B %d, %Y %H:%M:%S'
    try:
        return datetime.strptime(date_text, source_format).strftime(display_format)
    except ValueError:
        return date_text
def add_issue_to_github(issue):
    """ Migrates the given Google Code issue to Github.

    issue: dict built by get_gcode_issue().
    Returns the created PyGithub issue object, or None on a dry run.
    Raises Exception if the remaining API budget is too low to finish safely.
    """
    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        # Fixed message typo: "Aborting to to" -> "Aborting due to".
        raise Exception('Aborting due to impending Github API rate-limit cutoff.')
    # NOTE(review): this replace is a no-op ('%' -> '%'); it presumably once
    # escaped % signs like escape() does — confirm the intended replacement.
    body = issue['content'].replace('%', '%')
    output('Adding issue %d' % issue['gid'])
    github_issue = None
    if not options.dry_run:
        github_labels = [github_label(label) for label in issue['labels']]
        # Body is unicode; encode to UTF-8 bytes for the API call.
        github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
    # Assigns issues that originally had an owner to the current user
    if issue['owner'] and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)
    return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
    """ Migrates all comments from a Google Code issue to its Github copy.

    github_issue: PyGithub issue object to receive the comments.
    gcode_issue: dict built by get_gcode_issue(); each entry of its
        'comments' list carries 'author', 'date' and 'body' keys.
    Comments already present on the Github issue (matched by exact body
    text) are skipped, which makes the migration safely re-runnable.
    """
    # Retrieve existing Github comments, to figure out which Google Code comments are new
    existing_comments = [comment.body for comment in github_issue.get_comments()]
    # Add any remaining comments to the Github issue
    output(", adding comments")
    for i, comment in enumerate(gcode_issue['comments']):
        # This header must match the format checked against existing_comments.
        body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
        if body in existing_comments:
            logging.info('Skipping comment %d: already present', i + 1)
        else:
            logging.info('Adding comment %d', i + 1)
            if not options.dry_run:
                # Body is unicode; encode to UTF-8 bytes for the API call.
                github_issue.create_comment(body.encode('utf-8'))
            output('.')
def get_attachments(link, attachments):
    """ Returns a Markdown fragment linking to the given attachment elements,
    or '' when there are none.

    link: URL of the issue or comment that owns the attachments.
    attachments: pyquery result of the '.attachments' nodes.
    """
    if not attachments:
        return ''
    body = u'\n\n'
    for attachment in (pq(a) for a in attachments):
        if not attachment('a'): # Skip deleted attachments
            continue
        # Linking to the comment with the attachment rather than the
        # attachment itself since Google Code uses download tokens for
        # attachments
        # Keep everything unicode: encoding the filename to UTF-8 bytes here
        # made str.format fail on non-ASCII filenames.
        body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
    return body
def get_gcode_issue(issue_summary):
    """ Builds a full issue dict from one row of the Google Code summary CSV,
    scraping the issue's detail page for body, comments and attachments.

    Fixed: the two 'if not label' lines carried stray leading '+' diff
    markers, which made the file a syntax error.
    """
    def get_author(doc):
        # Render the author as a Markdown link to their Google Code profile.
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }
    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        if not label:
            continue  # AllLabels may be empty; don't try to add a blank label
        labels.append(LABEL_MAPPING.get(label, label))
    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])
    issue['labels'] = labels
    # Scrape the issue details page for the issue body and comments
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        # An authenticated cookie stops Google Code from mangling user names.
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    doc = pq(opener.open(issue['link']).read())
    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)
    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())
    split_comment(issue, description('pre').text())
    # The first "comment" is really the issue description; pop it into 'content'.
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
        content = issue['comments'].pop(0)['body'],
        footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
        attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
        **issue)
    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments
        date = parse_gcode_date(comment('.date').attr('title'))
        body = comment('pre').text()
        author = get_author(comment)
        updates = comment('.updates .box-inner')
        if updates:
            # Convert the status-update box's minimal HTML to Markdown.
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        split_comment({'date': date, 'author': author}, body)
    return issue
def get_gcode_issues():
    """ Fetches the full issue summary list from Google Code's CSV export.

    The export is paginated; a trailing sentinel row whose ID contains
    'truncated' signals that another page must be requested.
    """
    page_size = 100
    offset = 0
    rows = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, page_size, offset)
        reader = csv.DictReader(urllib2.urlopen(url), dialect = csv.excel)
        rows.extend(reader)
        if not rows or 'truncated' not in rows[-1]['ID']:
            return rows
        # Drop the sentinel row and fetch the next page.
        rows.pop()
        offset += page_size
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues: dict mapping Google Code issue IDs to already-migrated
        Github issue objects (from get_existing_github_issues()); it is also
        updated in place as dummy placeholder issues are created.
    """
    issues = get_gcode_issues()
    # Tracks the last Google Code ID seen, so ID gaps can be filled below.
    previous_gid = 1
    for issue in issues:
        issue = get_gcode_issue(issue)
        if options.skip_closed and (issue['state'] == 'closed'):
            continue
        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # NOTE(review): this records the dummy under previous_gid, not
                # the gid just created — looks like it should be
                # existing_issues[gid]; confirm before changing.
                existing_issues[previous_gid] = github_issue
            previous_gid = issue['gid']
        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)
        if github_issue:
            add_comments_to_issue(github_issue, issue)
            # Close (or reopen) the Github issue to mirror the Google Code state.
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
    log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.
    The result maps Google Code issue numbers to Github issue objects.
    """
    output("Retrieving existing Github issues...\n")
    # The footer added to each migrated issue embeds the original Google Code
    # URL; this regex recovers the numeric issue ID from it.
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        # Scan both open and closed issues so re-runs never create duplicates.
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            id_match = id_re.search(issue.body)
            if not id_match:
                continue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if not 'imported' in labels:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
    except:
        # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit;
        # it does re-raise, so errors are logged here, not swallowed.
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """ Logs the Github API rate-limit budget as (remaining, total). """
    logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
c80ddefc60430c890ff235c998bdf04f6e0fcb30
|
Handle attachements with non-ascii filenames properly.
|
diff --git a/migrateissues.py b/migrateissues.py
index 6eb5f8b..babb9d3 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,386 +1,386 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
- body = '\n\n'
+ body = u'\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
- body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
+ body += u'**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
if options.skip_closed and (issue['state'] == 'closed'):
continue
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
ea8083bdbdef64f7d0b77c6c150483d7c0e93bd8
|
Add --skip-closed option
|
diff --git a/README.md b/README.md
index 863a9f7..6d4adbf 100644
--- a/README.md
+++ b/README.md
@@ -1,81 +1,84 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
Github's API does not support creating issues or adding comments without
notifying everyone watching the repository. As a result, running this script
targeting an existing repository with watchers who do not want to receive a
very large number of emails is probably not a good idea.
I do not know of any way around this other than deleting and recreating the
repository immediately before running the import.
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
+ --skip-closed Skip all closed bugs
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repository.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
+
+`--skip-closed` will skip migrating issues that were closed.
diff --git a/migrateissues.py b/migrateissues.py
index 947d70a..6eb5f8b 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,382 +1,386 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
    """Write *string* to stdout with no trailing newline, flushing immediately."""
    stream = sys.stdout
    stream.write(string)
    stream.flush()
def escape(s):
    """Escape characters that Github's Markdown renderer would mangle.

    Only '%' needs escaping; it is replaced with its HTML entity.
    Falsy values (None, '') are returned unchanged.
    """
    if s:
        # Bug fix: the original replace('%', '%') was a no-op, so titles and
        # bodies were never actually escaped.
        s = s.replace('%', '&#37;')  # Escape % signs
    return s
def github_label(name, color = "FFFFFF"):
    """ Returns the Github label with the given name, creating it if necessary. """
    if name in label_cache:
        return label_cache[name]
    try:
        label = github_repo.get_label(name)
    except GithubException:
        # Label doesn't exist yet on the repository; create it
        label = github_repo.create_label(name, color)
    return label_cache.setdefault(name, label)
def parse_gcode_date(date_text):
    """ Transforms a Google Code date into a more human readable string.

    Returns the input unchanged when it doesn't match Google Code's
    'Mon Jun 03 10:15:30 2013' layout.
    """
    try:
        when = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
    except ValueError:
        return date_text
    else:
        return when.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
    """ Migrates the given Google Code issue to Github.

    Returns the created Github issue object, or None on a dry run.
    """
    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        # Bug fix: error message read "Aborting to to impending".
        raise Exception('Aborting due to impending Github API rate-limit cutoff.')

    # Bug fix: replace('%', '%') was a no-op; actually escape % signs.
    body = issue['content'].replace('%', '&#37;')

    output('Adding issue %d' % issue['gid'])
    github_issue = None

    if not options.dry_run:
        github_labels = [github_label(label) for label in issue['labels']]
        github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)

    # Assigns issues that originally had an owner to the current user
    if issue['owner'] and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)

    return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
    """ Migrates all comments from a Google Code issue to its Github copy. """
    # Fetch the comments already on the Github issue so reruns don't duplicate them
    existing_comments = [c.body for c in github_issue.get_comments()]

    output(", adding comments")
    for index, comment in enumerate(gcode_issue['comments']):
        body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
        if body in existing_comments:
            logging.info('Skipping comment %d: already present', index + 1)
            continue
        logging.info('Adding comment %d', index + 1)
        if not options.dry_run:
            github_issue.create_comment(body.encode('utf-8'))
        output('.')
def get_attachments(link, attachments):
    """Render Markdown links for a comment's attachments.

    Returns '' when there are no attachments.  Each link points at the
    comment itself rather than the attachment, because Google Code
    attachment URLs use one-time download tokens.
    """
    if not attachments:
        return ''

    parts = ['\n\n']
    for node in attachments:
        attachment = pq(node)
        if not attachment('a'):  # Deleted attachments have no anchor element
            continue
        parts.append('**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link))
    return ''.join(parts)
def get_gcode_issue(issue_summary):
    """Scrape one Google Code issue (body, labels, comments) into a plain dict.

    *issue_summary* is one row from the CSV issue index; the issue's details
    page is fetched and parsed for the description, comments and attachments.
    """
    def get_author(doc):
        # Render the author as a Markdown link to their Google Code profile
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))

    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        # NOTE(review): replace('%', '%') is a no-op — presumably it was meant
        # to escape % signs (compare escape()); confirm against history.
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        labels.append(LABEL_MAPPING.get(label, label))

    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])

    issue['labels'] = labels

    # Scrape the issue details page for the issue body and comments
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        # Logged-in cookie lets us see unmangled user names
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    doc = pq(opener.open(issue['link']).read())

    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)

    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                # '...' markers stitch the split pieces together visually
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())

    # The description is split first; its first chunk becomes the issue body
    split_comment(issue, description('pre').text())
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
            content = issue['comments'].pop(0)['body'],
            footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
            attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
            **issue)

    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments

        date = parse_gcode_date(comment('.date').attr('title'))
        body = comment('pre').text()
        author = get_author(comment)

        updates = comment('.updates .box-inner')
        if updates:
            # Convert the simple HTML of the status-update box into Markdown
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')

        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))

        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        split_comment({'date': date, 'author': author}, body)

    return issue
def get_gcode_issues():
    """Download the complete CSV issue index from Google Code.

    Pages through the index *count* rows at a time until Google Code no
    longer reports a truncated result set, then returns the accumulated rows.
    """
    count = 100
    start_index = 0
    issues = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
        reader = csv.DictReader(urllib2.urlopen(url), dialect = csv.excel)
        issues.extend(reader)
        # Google Code appends a sentinel row whose ID mentions 'truncated'
        # when there are more results to fetch
        if not (issues and 'truncated' in issues[-1]['ID']):
            return issues
        issues.pop()
        start_index += count
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue IDs to already-migrated Github
    issue objects, making reruns idempotent.
    """
    issues = get_gcode_issues()
    previous_gid = 1

    for issue in issues:
        issue = get_gcode_issue(issue)

        if options.skip_closed and (issue['state'] == 'closed'):
            continue

        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                # NOTE(review): dummy issues are created even with --dry-run; confirm intended.
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # Bug fix: record the dummy under its own ID (was previous_gid),
                # otherwise every rerun re-creates all the dummy issues.
                existing_issues[gid] = github_issue
            previous_gid = issue['gid']

        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)

        if github_issue:
            add_comments_to_issue(github_issue, issue)
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    Issues whose body does not carry the Google Code footer are ignored.
    """
    output("Retrieving existing Github issues...\n")
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            # Only issues carrying the Google Code footer were migrated by us
            id_match = id_re.search(issue.body)
            if not id_match:
                continue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if not 'imported' in labels:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported', existing_count, imported_count)
    except Exception:  # narrowed from a bare except: so Ctrl-C isn't logged as a failure
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """Log the Github API rate-limit status (remaining/total requests)."""
    remaining_and_total = github.rate_limiting
    logging.info('Rate limit (remaining/total) %r', remaining_and_total)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
+ parser.add_option('--skip-closed', action = 'store_true', dest = 'skip_closed', help = 'Skip all closed bugs', default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
32da3ae9f82449cb97f64543b6fe5967591a2a89
|
Encode attachment filename to UTF-8, in case it has non-ASCII chars in it
|
diff --git a/migrateissues.py b/migrateissues.py
index 42640a5..947d70a 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,382 +1,382 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
    """Escape characters that Github's Markdown renderer would mangle.

    Only '%' needs escaping; it is replaced with its HTML entity.
    Falsy values (None, '') are returned unchanged.
    """
    if s:
        # Bug fix: the original replace('%', '%') was a no-op, so titles and
        # bodies were never actually escaped.
        s = s.replace('%', '&#37;')  # Escape % signs
    return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
github_issue = None
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
- body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
+ body += '**Attachment:** [{}]({})'.format(attachment('b').text().encode('utf-8'), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['comments'] = []
def split_comment(comment, text):
# Github has an undocumented maximum comment size (unless I just failed
# to find where it was documented), so split comments up into multiple
# posts as needed.
while text:
comment['body'] = text[:7000]
text = text[7000:]
if text:
comment['body'] += '...'
text = '...' + text
issue['comments'].append(comment.copy())
split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue IDs to already-migrated Github
    issue objects, making reruns idempotent.
    """
    issues = get_gcode_issues()
    previous_gid = 1

    for issue in issues:
        issue = get_gcode_issue(issue)

        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                # NOTE(review): dummy issues are created even with --dry-run; confirm intended.
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # Bug fix: record the dummy under its own ID (was previous_gid),
                # otherwise every rerun re-creates all the dummy issues.
                existing_issues[gid] = github_issue
            previous_gid = issue['gid']

        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)

        if github_issue:
            add_comments_to_issue(github_issue, issue)
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
e118fb2adc607d0cb363cff36093ec088ae01608
|
Fix for error when running with --dry-run: "UnboundLocalError: local variable 'github_issue' referenced before assignment"
|
diff --git a/migrateissues.py b/migrateissues.py
index 7c19d0c..42640a5 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,380 +1,382 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
    """Escape characters that Github's Markdown renderer would mangle.

    Only '%' needs escaping; it is replaced with its HTML entity.
    Falsy values (None, '') are returned unchanged.
    """
    if s:
        # Bug fix: the original replace('%', '%') was a no-op, so titles and
        # bodies were never actually escaped.
        s = s.replace('%', '&#37;')  # Escape % signs
    return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
+ github_issue = None
+
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
    """ Scrapes one Google Code issue (body and comments) into a dict.

    issue_summary is a single row from the issue-list CSV (see
    get_gcode_issues). Returns a dict with keys: gid, title, link, owner,
    state ('open'/'closed'), date, status, labels, author, content (the
    formatted first post) and comments (a list of {date, author, body}
    dicts; over-long texts are split across several entries).
    Reads module globals: options, google_project_name.
    """
    def get_author(doc):
        # Render the author as a Markdown link back to their Google Code profile.
        userlink = doc('.userlink')
        return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))

    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        # NOTE(review): replace('%', '%') is a no-op — it looks like a
        # percent-escaping call whose replacement text was mangled in a past
        # edit; confirm the intended escape sequence.
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        labels.append(LABEL_MAPPING.get(label, label))
    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])
    issue['labels'] = labels

    # Scrape the issue details page for the issue body and comments.
    # An optional cookie lets us see unmangled author names (see --google-code-cookie).
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    doc = pq(opener.open(issue['link']).read())

    description = doc('.issuedescription .issuedescription')
    issue['author'] = get_author(description)

    issue['comments'] = []
    def split_comment(comment, text):
        # Github has an undocumented maximum comment size (unless I just failed
        # to find where it was documented), so split comments up into multiple
        # posts as needed. Continuation pieces are joined with '...' markers.
        while text:
            comment['body'] = text[:7000]
            text = text[7000:]
            if text:
                comment['body'] += '...'
                text = '...' + text
            issue['comments'].append(comment.copy())

    # The issue description itself goes through split_comment; the first
    # piece becomes the issue body and any overflow stays in 'comments'
    # until the list is reset below.
    split_comment(issue, description('pre').text())
    issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
            content = issue['comments'].pop(0)['body'],
            footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
            attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
            **issue)

    # NOTE(review): any overflow pieces produced by split_comment above are
    # discarded by this reset — presumably intentional (the footer marks the
    # issue for re-import detection), but confirm long descriptions survive.
    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue # Sign in prompt line uses same class
        if comment.hasClass('delcom'):
            continue # Skip deleted comments

        date = parse_gcode_date(comment('.date').attr('title'))
        body = comment('pre').text()
        author = get_author(comment)

        # Metadata changes (status/label/owner edits) are appended to the
        # comment body, with minimal HTML-to-Markdown conversion.
        updates = comment('.updates .box-inner')
        if updates:
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')

        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))

        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')

        split_comment({'date': date, 'author': author}, body)

    return issue
def get_gcode_issues():
    """Download the full issue list from Google Code as CSV rows.

    Pages through the CSV export (Google Code appends a sentinel row whose
    ID contains 'truncated' when more results remain) and returns a list
    of dicts, one per issue summary. Reads module global google_project_name.
    """
    page_size = 100
    offset = 0
    rows = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, page_size, offset)
        reader = csv.DictReader(urllib2.urlopen(url), dialect=csv.excel)
        rows.extend(reader)
        # No sentinel row -> this was the last page.
        if not (rows and 'truncated' in rows[-1]['ID']):
            return rows
        rows.pop()
        offset += page_size
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    existing_issues maps Google Code issue IDs to already-migrated Github
    issue objects (see get_existing_github_issues); those are skipped but
    still receive any new comments. Reads module globals: options,
    google_project_name, github_repo.
    """
    issues = get_gcode_issues()
    previous_gid = 1

    for issue in issues:
        issue = get_gcode_issue(issue)

        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # NOTE(review): this keys the dummy under previous_gid, not gid,
                # so successive dummies overwrite one another — looks like it
                # should be existing_issues[gid]; confirm before relying on it.
                existing_issues[previous_gid] = github_issue
            previous_gid = issue['gid']

        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)

        if github_issue:
            add_comments_to_issue(github_issue, issue)
            # Close (or reopen) the Github issue to mirror Google Code's state.
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
        log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects.
    Issues are recognised as migrated by the footer link added during
    import (GOOGLE_ID_RE). Reads module globals: google_project_name,
    github_repo.
    """
    output("Retrieving existing Github issues...\n")
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            # Only issues whose body carries the Google Code footer count as
            # migrated; anything else is left alone.
            id_match = id_re.search(issue.body)
            if not id_match:
                continue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if not 'imported' in labels:
                # TODO we could fix up the label here instead of just warning
                logging.warning('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported', existing_count, imported_count)
    except Exception:
        # Narrowed from a bare `except:` (PEP 8 / E722) so KeyboardInterrupt
        # and SystemExit aren't logged as enumeration failures; the exception
        # is re-raised either way.
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """Log the Github API rate-limit status as (remaining, total) requests."""
    rate_pair = github.rate_limiting
    logging.info('Rate limit (remaining/total) %r', rate_pair)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
ba697b6eec9ea7b56820217646a8e80759778318
|
Add a note that running this results in emails being sent
|
diff --git a/README.md b/README.md
index 6091836..863a9f7 100644
--- a/README.md
+++ b/README.md
@@ -1,71 +1,81 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
+## THIS SCRIPT WILL SEND A LOT OF EMAILS TO ALL WATCHERS
+
+Github's API does not support creating issues or adding comments without
+notifying everyone watching the repository. As a result, running this script
+targetting an existing repository with watchers who do not want to recieve a
+very large number of emails is probably not a good idea.
+
+I do not know of any way around this other than deleting and recreating the
+repository immediately before running the import.
+
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
-c, --google-code-cookie Supply cookies to use for scraping Google Code
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repistory.
`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
pages from Google Code. Google Code normally mangles names for spam prevention,
and getting the raw names requires being logged in and having filled out a
CAPTCHA.
|
arthur-debert/google-code-issues-migrator
|
73799d7192cda241d19996bf44e8f26449430898
|
Split comments which are longer than Github's limit into multiple comments
|
diff --git a/migrateissues.py b/migrateissues.py
index 33547ba..7c19d0c 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,369 +1,380 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
+
+ issue['comments'] = []
+ def split_comment(comment, text):
+ # Github has an undocumented maximum comment size (unless I just failed
+ # to find where it was documented), so split comments up into multiple
+ # posts as needed.
+ while text:
+ comment['body'] = text[:7000]
+ text = text[7000:]
+ if text:
+ comment['body'] += '...'
+ text = '...' + text
+ issue['comments'].append(comment.copy())
+
+ split_comment(issue, description('pre').text())
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
- content = description('pre').text(),
+ content = issue['comments'].pop(0)['body'],
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
if comment.hasClass('delcom'):
continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
- issue['comments'].append({
- 'date': date,
- 'author': author,
- 'body': body
- })
+ split_comment({'date': date, 'author': author}, body)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
c7ae6df428d38fac3bb01a41b48bfc1b2766cc53
|
Skip deleted comments
|
diff --git a/migrateissues.py b/migrateissues.py
index 849a3f3..33547ba 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,367 +1,369 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = description('pre').text(),
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
+ if comment.hasClass('delcom'):
+ continue # Skip deleted comments
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
issue['comments'].append({
'date': date,
'author': author,
'body': body
})
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    Scans every open and closed issue in the Github repository, matching
    the migration footer embedded in each issue body. The result maps
    Google Code issue numbers to Github issue objects.
    """
    output("Retrieving existing Github issues...\n")
    footer_re = re.compile(GOOGLE_ID_RE % google_project_name)
    try:
        all_issues = []
        for state in ('open', 'closed'):
            all_issues.extend(github_repo.get_issues(state=state))
        issue_map = {}
        for gh_issue in all_issues:
            match = footer_re.search(gh_issue.body)
            if match is None:
                continue
            gcode_id = int(match.group(1))
            issue_map[gcode_id] = gh_issue
            label_names = [label.name for label in gh_issue.get_labels()]
            if 'imported' not in label_names:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', gcode_id, label_names, gh_issue.title)
        logging.info('Found %d Github issues, %d imported', len(all_issues), len(issue_map))
    except:
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    """Log the current Github API rate limit (remaining/total) at INFO level."""
    rate_pair = github.rate_limiting
    logging.info('Rate limit (remaining/total) %r', rate_pair)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
f5b622c77afb75193f797ac973156b2048296a45
|
Use unicode strings where needed
|
diff --git a/migrateissues.py b/migrateissues.py
index cebe222..849a3f3 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,367 +1,367 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
- body = '_From {author} on {date}_\n\n{body}'.format(**comment)
+ body = u'_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
def get_author(doc):
userlink = doc('.userlink')
return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
description = doc('.issuedescription .issuedescription')
issue['author'] = get_author(description)
- issue['content'] = '_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
+ issue['content'] = u'_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
content = description('pre').text(),
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
date = parse_gcode_date(comment('.date').attr('title'))
body = comment('pre').text()
author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
issue['comments'].append({
'date': date,
'author': author,
'body': body
})
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
1243d5a4bb563bf5900d3c5fdc9f127f9f89f224
|
Link usernames to their Google Code profiles
|
diff --git a/migrateissues.py b/migrateissues.py
index a07b2e3..cebe222 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,362 +1,367 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = '_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_attachments(link, attachments):
if not attachments:
return ''
body = '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
return body
def get_gcode_issue(issue_summary):
+ def get_author(doc):
+ userlink = doc('.userlink')
+ return '[{}](https://code.google.com{})'.format(userlink.text(), userlink.attr('href'))
+
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
- 'author': issue_summary['Reporter'],
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
opener = urllib2.build_opener()
if options.google_code_cookie:
opener.addheaders = [('Cookie', options.google_code_cookie)]
doc = pq(opener.open(issue['link']).read())
- issue['content'] = doc('.issuedescription .issuedescription pre').text()
+ description = doc('.issuedescription .issuedescription')
+ issue['author'] = get_author(description)
issue['content'] = '_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
+ content = description('pre').text(),
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
date = parse_gcode_date(comment('.date').attr('title'))
- author = comment('.userlink').text()
body = comment('pre').text()
+ author = get_author(comment)
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
issue['comments'].append({
'date': date,
'author': author,
'body': body
})
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
354912a7842eb68672cc06808bf3538bbc13b714
|
Add an option to use a cookie for Google Code requests
|
diff --git a/README.md b/README.md
index 6a57b85..6091836 100644
--- a/README.md
+++ b/README.md
@@ -1,65 +1,71 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- Github doesn't support attachments for issues, so any attachments are simply
listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
+ -c, --google-code-cookie Supply cookies to use for scraping Google Code
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repistory.
+
+`--google-code-cookie` takes a HTTP header encoded cookie to use when fetching
+pages from Google Code. Google Code normally mangles names for spam prevention,
+and getting the raw names requires being logged in and having filled out a
+CAPTCHA.
diff --git a/migrateissues.py b/migrateissues.py
index 8129c74..a07b2e3 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,358 +1,362 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
    """Write *string* to stdout and flush, so progress shows immediately."""
    stream = sys.stdout
    stream.write(string)
    stream.flush()
def escape(s):
    """Process text to convert markup and escape things which need escaping"""
    # NOTE(review): replace('%', '%') is a no-op as written; this looks like
    # extraction mangling of a real escape (e.g. '%' -> '&#37;' or '%%').
    # Confirm the intended replacement against the repository history before
    # relying on this function.
    if s:
        s = s.replace('%', '%')  # Escape % signs
    return s
def github_label(name, color = "FFFFFF"):
    """ Returns the Github label with the given name, creating it if necessary. """
    # Serve from the module-level cache when possible to avoid API requests.
    if name in label_cache:
        return label_cache[name]
    try:
        label = github_repo.get_label(name)
    except GithubException:
        # Label does not exist on the repository yet; create it.
        label = github_repo.create_label(name, color)
    return label_cache.setdefault(name, label)
def parse_gcode_date(date_text):
    """ Transforms a Google Code date into a more human readable string.

    Input like 'Mon Jan 01 12:00:00 2007' becomes 'January 01, 2007 12:00:00';
    anything that does not match that format is returned unchanged.
    """
    input_format = '%a %b %d %H:%M:%S %Y'
    output_format = "%B %d, %Y %H:%M:%S"
    try:
        parsed = datetime.strptime(date_text, input_format)
    except ValueError:
        # Not a recognised Google Code timestamp; pass it through as-is.
        return date_text
    return parsed.strftime(output_format)
def add_issue_to_github(issue):
    """ Migrates the given Google Code issue to Github.

    Returns the created Github issue object, or None on a dry run.
    """
    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        # Fixed message typo ("to to" -> "due to").
        raise Exception('Aborting due to impending Github API rate-limit cutoff.')

    # NOTE(review): replace('%', '%') is a no-op; likely mangled from a real
    # escape sequence -- confirm against repository history.
    body = issue['content'].replace('%', '%')

    output('Adding issue %d' % issue['gid'])

    # Initialise to None so the function returns a defined value on dry runs
    # (previously this raised UnboundLocalError when --dry-run was set).
    github_issue = None
    if not options.dry_run:
        github_labels = [github_label(label) for label in issue['labels']]
        github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)

    # Assigns issues that originally had an owner to the current user
    if issue['owner'] and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)

    return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
    """ Migrates all comments from a Google Code issue to its Github copy. """
    # Retrieve existing Github comments, to figure out which Google Code comments are new
    existing_comments = [comment.body for comment in github_issue.get_comments()]

    # Add any remaining comments to the Github issue
    output(", adding comments")
    for index, comment in enumerate(gcode_issue['comments'], start = 1):
        text = '_From {author} on {date}_\n\n{body}'.format(**comment)
        if text in existing_comments:
            logging.info('Skipping comment %d: already present', index)
            continue
        logging.info('Adding comment %d', index)
        if not options.dry_run:
            github_issue.create_comment(text.encode('utf-8'))
        output('.')
def get_attachments(link, attachments):
    """Render a markdown fragment linking each non-deleted attachment.

    Each entry links to *link* (the containing comment) rather than the
    attachment itself, because Google Code protects attachment downloads
    with one-time tokens.
    """
    if not attachments:
        return ''

    body = '\n\n'
    for raw in attachments:
        attachment = pq(raw)
        if not attachment('a'):  # Skip deleted attachments
            continue
        body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
    return body
def get_gcode_issue(issue_summary):
    """Build a full issue dict for one Google Code issue.

    *issue_summary* is one row of the CSV issue index; the body and the
    comments are scraped from the issue's HTML detail page.

    NOTE(review): this block contained embedded unified-diff markers
    ('-'/'+' prefixed lines); they have been resolved in favour of the
    post-patch code (cookie-aware opener).
    """
    # Populate properties available from the summary CSV
    issue = {
        'gid': int(issue_summary['ID']),
        'title': issue_summary['Summary'].replace('%', '%'),
        'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
        'author': issue_summary['Reporter'],
        'owner': issue_summary['Owner'],
        'state': 'closed' if issue_summary['Closed'] else 'open',
        'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
        'status': issue_summary['Status'].lower()
    }

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ['imported']
    for label in issue_summary['AllLabels'].split(', '):
        if label.startswith('Priority-') and options.omit_priority:
            continue
        labels.append(LABEL_MAPPING.get(label, label))
    # Add additional labels based on the issue's state
    if issue['status'] in STATE_MAPPING:
        labels.append(STATE_MAPPING[issue['status']])
    issue['labels'] = labels

    # Scrape the issue details page for the issue body and comments.
    # A cookie (if supplied) lets us see unmangled reporter names.
    opener = urllib2.build_opener()
    if options.google_code_cookie:
        opener.addheaders = [('Cookie', options.google_code_cookie)]
    doc = pq(opener.open(issue['link']).read())

    issue['content'] = doc('.issuedescription .issuedescription pre').text()
    issue['content'] = '_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
        footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
        attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
        **issue)

    issue['comments'] = []
    for comment in doc('.issuecomment'):
        comment = pq(comment)
        if not comment('.date'):
            continue  # Sign in prompt line uses same class
        date = parse_gcode_date(comment('.date').attr('title'))
        author = comment('.userlink').text()
        body = comment('pre').text()
        updates = comment('.updates .box-inner')
        if updates:
            # Convert the HTML "updates" box to markdown-ish text.
            body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
        body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
        # Strip the placeholder text if there's any other updates
        body = body.replace('(No comment was entered for this change.)\n\n', '')
        issue['comments'].append({
            'date': date,
            'author': author,
            'body': body
        })

    return issue
def get_gcode_issues():
    """Download the complete CSV issue index from Google Code.

    Google Code appends a row whose ID contains 'truncated' when more pages
    remain, so keep paging until that marker disappears.
    """
    page_size = 100
    offset = 0
    rows = []
    while True:
        url = GOOGLE_ISSUES_URL.format(google_project_name, page_size, offset)
        rows.extend(csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
        if not (rows and 'truncated' in rows[-1]['ID']):
            return rows
        rows.pop()  # Drop the truncation marker before fetching the next page
        offset += page_size
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github. """
    issues = get_gcode_issues()
    # Tracks the last Google Code issue id seen, used to detect id gaps.
    previous_gid = 1
    for issue in issues:
        # Expand the CSV summary row into a full issue dict (scrapes the page).
        issue = get_gcode_issue(issue)
        # If we're trying to do a complete migration to a fresh Github project,
        # and want to keep the issue numbers synced with Google Code's, then we
        # need to create dummy closed issues for deleted or missing Google Code
        # issues.
        if options.synchronize_ids:
            for gid in xrange(previous_gid + 1, issue['gid']):
                if gid in existing_issues:
                    continue
                output('Creating dummy entry for missing issue %d\n' % gid)
                title = 'Google Code skipped issue %d' % gid
                body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
                footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
                body += '\n\n' + footer
                github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
                github_issue.edit(state = 'closed')
                # NOTE(review): keys the dummy under previous_gid, not gid --
                # looks like a bug; confirm against upstream before changing.
                existing_issues[previous_gid] = github_issue
        previous_gid = issue['gid']
        # Add the issue and its comments to Github, if we haven't already
        if issue['gid'] in existing_issues:
            github_issue = existing_issues[issue['gid']]
            output('Not adding issue %d (exists)' % issue['gid'])
        else:
            github_issue = add_issue_to_github(issue)
        if github_issue:
            add_comments_to_issue(github_issue, issue)
            # Close (or reopen) the Github issue to match Google Code's state.
            if github_issue.state != issue['state']:
                github_issue.edit(state = issue['state'])
        output('\n')
    log_rate_info()
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps Google Code issue numbers to Github issue objects,
    recognised by the "Original issue" footer embedded in the issue body.
    Raises whatever the Github API raised if enumeration fails.
    """
    output("Retrieving existing Github issues...\n")
    id_re = re.compile(GOOGLE_ID_RE % google_project_name)
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are not
    # caught; behaviour for API errors is unchanged (log, then re-raise).
    try:
        existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
        existing_count = len(existing_issues)
        issue_map = {}
        for issue in existing_issues:
            id_match = id_re.search(issue.body)
            if not id_match:
                continue  # Not a migrated issue
            google_id = int(id_match.group(1))
            issue_map[google_id] = issue
            labels = [l.name for l in issue.get_labels()]
            if not 'imported' in labels:
                # TODO we could fix up the label here instead of just warning
                logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
        imported_count = len(issue_map)
        logging.info('Found %d Github issues, %d imported', existing_count, imported_count)
    except Exception:
        logging.error('Failed to enumerate existing issues')
        raise
    return issue_map
def log_rate_info():
    # Log the Github API rate-limit status (remaining, total) at INFO level.
    logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
    # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
    #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
+ parser.add_option("-c", "--google-code-cookie", dest = "google_code_cookie", help = "Cookie to use for Google Code requests. Required to get unmangled names", default = '')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
8092976987b39d1c0209fef39e1c0c92baf08e4d
|
Also scrape attachments from the initial issue
|
diff --git a/migrateissues.py b/migrateissues.py
index 47b6fbe..8129c74 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,352 +1,358 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
try:
parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
except ValueError:
return date_text
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = '_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
+def get_attachments(link, attachments):
+ if not attachments:
+ return ''
+
+ body = '\n\n'
+ for attachment in (pq(a) for a in attachments):
+ if not attachment('a'): # Skip deleted attachments
+ continue
+
+ # Linking to the comment with the attachment rather than the
+ # attachment itself since Google Code uses download tokens for
+ # attachments
+ body += '**Attachment:** [{}]({})'.format(attachment('b').text(), link)
+ return body
+
+
def get_gcode_issue(issue_summary):
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'author': issue_summary['Reporter'],
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
doc = pq(urllib2.urlopen(issue['link']).read())
issue['content'] = doc('.issuedescription .issuedescription pre').text()
- issue['content'] = '_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}\n\n{footer}'.format(
+ issue['content'] = '_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}{attachments}\n\n{footer}'.format(
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
+ attachments = get_attachments(issue['link'], doc('.issuedescription .issuedescription .attachments')),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
date = parse_gcode_date(comment('.date').attr('title'))
author = comment('.userlink').text()
body = comment('pre').text()
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
- attachments = comment('.attachments')
- if attachments:
- body += '\n\n'
- for attachment in (pq(a) for a in attachments):
- if not attachment('a'): # Skip deleted attachments
- continue
-
- # Linking to the comment with the attachment rather than the
- # attachment itself since Google Code uses download tokens for
- # attachments
- body += '**Attachment:** [{}]({}#{})'.format(
- attachment('b').text(), issue['link'], comment.attr('id'))
+ body += get_attachments('{}#{}'.format(issue['link'], comment.attr('id')), comment('.attachments'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
issue['comments'].append({
'date': date,
'author': author,
'body': body
})
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
5319d4f70d9e92a093730243c7a6c1e7acb4b56b
|
Make dates prettier
|
diff --git a/migrateissues.py b/migrateissues.py
index aabbe78..47b6fbe 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,349 +1,352 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
- parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
+ try:
+ parsed = datetime.strptime(date_text, '%a %b %d %H:%M:%S %Y')
+ except ValueError:
+ return date_text
+
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = '_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_gcode_issue(issue_summary):
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'author': issue_summary['Reporter'],
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
doc = pq(urllib2.urlopen(issue['link']).read())
issue['content'] = doc('.issuedescription .issuedescription pre').text()
- # TODO date formatting
- issue['content'] = '_From {author} on {date}_\n\n{content}\n\n{footer}'.format(
+ issue['content'] = '_From {author} on {date:%B %d, %Y %H:%M:%S}_\n\n{content}\n\n{footer}'.format(
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
- date = comment('.date').attr('title') # TODO: transform to better format
+ date = parse_gcode_date(comment('.date').attr('title'))
author = comment('.userlink').text()
body = comment('pre').text()
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
attachments = comment('.attachments')
if attachments:
body += '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({}#{})'.format(
attachment('b').text(), issue['link'], comment.attr('id'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
issue['comments'].append({
'date': date,
'author': author,
'body': body
})
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
1021be99e418169dcecc2b4297b28d9d5e3f0918
|
Fix check for existing comments
|
diff --git a/migrateissues.py b/migrateissues.py
index 62fff2f..aabbe78 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,349 +1,349 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = '_From {author} on {date}_\n\n{body}'.format(**comment)
- if body in existing_issues:
+ if body in existing_comments:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_gcode_issue(issue_summary):
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'author': issue_summary['Reporter'],
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
doc = pq(urllib2.urlopen(issue['link']).read())
issue['content'] = doc('.issuedescription .issuedescription pre').text()
# TODO date formatting
issue['content'] = '_From {author} on {date}_\n\n{content}\n\n{footer}'.format(
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
date = comment('.date').attr('title') # TODO: transform to better format
author = comment('.userlink').text()
body = comment('pre').text()
updates = comment('.updates .box-inner')
if updates:
body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
attachments = comment('.attachments')
if attachments:
body += '\n\n'
for attachment in (pq(a) for a in attachments):
if not attachment('a'): # Skip deleted attachments
continue
# Linking to the comment with the attachment rather than the
# attachment itself since Google Code uses download tokens for
# attachments
body += '**Attachment:** [{}]({}#{})'.format(
attachment('b').text(), issue['link'], comment.attr('id'))
# Strip the placeholder text if there's any other updates
body = body.replace('(No comment was entered for this change.)\n\n', '')
issue['comments'].append({
'date': date,
'author': author,
'body': body
})
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
df5b83c3b4acff7150deb82fc500a25957ebaafa
|
Link to the original comment when there's attachments
|
diff --git a/README.md b/README.md
index d228f72..6a57b85 100644
--- a/README.md
+++ b/README.md
@@ -1,65 +1,65 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, please
consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
### How it works ###
The script iterates over the issues and comments in a Google Code repository,
creating matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the
script, and lose their original creation date. We try to mitigate this by
adding a non-obtrusive header to each issue and comment stating the original
author and creation date.
- - Attachments are lost, since Github doesn't support them in issues or
- comments.
+ - Github doesn't support attachments for issues, so any attachments are simply
+ listed as links to the attachment on Google Code.
- Support for Merged-into links for duplicate issues are not implemented.
Otherwise almost everything is preserved, including labels, issue state
(open/closed), and issue status (invalid, wontfix, duplicate).
The script can be run repeatedly to migrate new issues and comments, without
mucking up what's already on Github.
### Required Python libraries ###
Run `pip install -r requirements.txt` to install all required libraries.
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help Show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
You will be prompted for your github password.
`--assign-owner` automatically assigns any issues that currently have an owner
to your Github user (the one running the script), even if you weren't the
original owner. This is used to save a little time in cases where you do in
fact own most issues.
`--dry-run` does as much as possible without actually adding anything to
Github. It's useful as a test, to turn up any errors or unexpected behaviors
before you run the script, irreversibly, on your real repository.
`--omit-priorities` skips migration of Google Code Priority labels, since many
projects don't actually use them, and would just remove them from Github
anyway.
`--synchronize-ids` attempts to ensure that every Github issue gets the same ID
as its original Google Code issue. Normally this happens anyway, but in some
cases Google Code skips issue numbers; this option fills the gaps with dummy
issues to ensure that the next real issue keeps the same numbering. This only
works, of course, if the migration starts with a fresh Github repistory.
diff --git a/migrateissues.py b/migrateissues.py
index df561ca..62fff2f 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,332 +1,349 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
# Add any remaining comments to the Github issue
output(", adding comments")
for i, comment in enumerate(gcode_issue['comments']):
body = '_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_issues:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_gcode_issue(issue_summary):
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'author': issue_summary['Reporter'],
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
doc = pq(urllib2.urlopen(issue['link']).read())
issue['content'] = doc('.issuedescription .issuedescription pre').text()
# TODO date formatting
issue['content'] = '_From {author} on {date}_\n\n{content}\n\n{footer}'.format(
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
- comment = {
- 'date': comment('.date').attr('title'), # TODO: transform to better format
- 'author': comment('.userlink').text(),
- 'body': comment('pre').text()
- }
+
+ date = comment('.date').attr('title') # TODO: transform to better format
+ author = comment('.userlink').text()
+ body = comment('pre').text()
+
updates = comment('.updates .box-inner')
if updates:
- if comment['body'] == '(No comment was entered for this change.)':
- comment['body'] = ''
- else:
- comment['body'] += '\n\n'
- comment['body'] += updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
- issue['comments'].append(comment)
+ body += '\n\n' + updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
+
+ attachments = comment('.attachments')
+ if attachments:
+ body += '\n\n'
+ for attachment in (pq(a) for a in attachments):
+ if not attachment('a'): # Skip deleted attachments
+ continue
+
+ # Linking to the comment with the attachment rather than the
+ # attachment itself since Google Code uses download tokens for
+ # attachments
+ body += '**Attachment:** [{}]({}#{})'.format(
+ attachment('b').text(), issue['link'], comment.attr('id'))
+
+ # Strip the placeholder text if there's any other updates
+ body = body.replace('(No comment was entered for this change.)\n\n', '')
+
+ issue['comments'].append({
+ 'date': date,
+ 'author': author,
+ 'body': body
+ })
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
b0bddc21d71bd64e57eb30226b5d4cb37545b143
|
Include status updates in migrated comments
|
diff --git a/migrateissues.py b/migrateissues.py
index bc636da..df561ca 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,331 +1,332 @@
#!/usr/bin/env python
import csv
import getpass
import logging
import optparse
import re
import sys
import urllib2
from datetime import datetime
from github import Github
from github import GithubException
from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : 'bug',
'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': 'invalid',
'duplicate': 'duplicate',
'wontfix': 'wontfix'
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
if s:
s = s.replace('%', '%') # Escape % signs
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try:
return label_cache[name]
except KeyError:
try:
return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
-def should_migrate_comment(comment):
- return '(No comment was entered for this change.)' not in comment
-
-
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception('Aborting to to impending Github API rate-limit cutoff.')
body = issue['content'].replace('%', '%')
output('Adding issue %d' % issue['gid'])
if not options.dry_run:
github_labels = [github_label(label) for label in issue['labels']]
github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [comment.body for comment in github_issue.get_comments()]
- comments = [comment for comment in gcode_issue['comments']
- if should_migrate_comment(comment)]
-
# Add any remaining comments to the Github issue
output(", adding comments")
- for i, comment in enumerate(comments):
+ for i, comment in enumerate(gcode_issue['comments']):
body = '_From {author} on {date}_\n\n{body}'.format(**comment)
if body in existing_issues:
logging.info('Skipping comment %d: already present', i + 1)
else:
logging.info('Adding comment %d', i + 1)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
output('.')
def get_gcode_issue(issue_summary):
# Populate properties available from the summary CSV
issue = {
'gid': int(issue_summary['ID']),
'title': issue_summary['Summary'].replace('%', '%'),
'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
'author': issue_summary['Reporter'],
'owner': issue_summary['Owner'],
'state': 'closed' if issue_summary['Closed'] else 'open',
'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
'status': issue_summary['Status'].lower()
}
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ['imported']
for label in issue_summary['AllLabels'].split(', '):
if label.startswith('Priority-') and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label, label))
# Add additional labels based on the issue's state
if issue['status'] in STATE_MAPPING:
labels.append(STATE_MAPPING[issue['status']])
issue['labels'] = labels
# Scrape the issue details page for the issue body and comments
doc = pq(urllib2.urlopen(issue['link']).read())
issue['content'] = doc('.issuedescription .issuedescription pre').text()
# TODO date formatting
issue['content'] = '_From {author} on {date}_\n\n{content}\n\n{footer}'.format(
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
**issue)
issue['comments'] = []
for comment in doc('.issuecomment'):
comment = pq(comment)
if not comment('.date'):
continue # Sign in prompt line uses same class
- issue['comments'].append({
+ comment = {
'date': comment('.date').attr('title'), # TODO: transform to better format
'author': comment('.userlink').text(),
'body': comment('pre').text()
- })
+ }
+ updates = comment('.updates .box-inner')
+ if updates:
+ if comment['body'] == '(No comment was entered for this change.)':
+ comment['body'] = ''
+ else:
+ comment['body'] += '\n\n'
+ comment['body'] += updates.html().strip().replace('\n', '').replace('<b>', '**').replace('</b>', '**').replace('<br/>', '\n')
+ issue['comments'].append(comment)
return issue
def get_gcode_issues():
count = 100
start_index = 0
issues = []
while True:
url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
if issues and 'truncated' in issues[-1]['ID']:
issues.pop()
start_index += count
else:
return issues
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
issues = get_gcode_issues()
previous_gid = 1
for issue in issues:
issue = get_gcode_issue(issue)
# If we're trying to do a complete migration to a fresh Github project,
# and want to keep the issue numbers synced with Google Code's, then we
# need to create dummy closed issues for deleted or missing Google Code
# issues.
if options.synchronize_ids:
for gid in xrange(previous_gid + 1, issue['gid']):
if gid in existing_issues:
continue
output('Creating dummy entry for missing issue %d\n' % gid)
title = 'Google Code skipped issue %d' % gid
body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
github_issue.edit(state = 'closed')
existing_issues[previous_gid] = github_issue
previous_gid = issue['gid']
# Add the issue and its comments to Github, if we haven't already
if issue['gid'] in existing_issues:
github_issue = existing_issues[issue['gid']]
output('Not adding issue %d (exists)' % issue['gid'])
else:
github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, issue)
if github_issue.state != issue['state']:
github_issue.edit(state = issue['state'])
output('\n')
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if not id_match:
continue
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
break
except BadCredentialsException:
print "Bad credentials, try again."
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try:
github_owner = github.get_user(owner_name)
except GithubException:
try:
github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else:
github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
4ad8284d06b7e29dd1732a682b86151b0b088833
|
Add requirements.txt
|
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..9a504d7
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+PyGithub==1.17.0
+cssselect==0.8
+lxml==3.2.3
+pyquery==1.2.4
+wsgiref==0.1.2
|
arthur-debert/google-code-issues-migrator
|
46800babc07ee9c871c39313bf566cd7acc68f0e
|
Rewrite to scrape the site since the API no longer exists
|
diff --git a/migrateissues.py b/migrateissues.py
index 8614bd5..bc636da 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,414 +1,331 @@
#!/usr/bin/env python
+import csv
+import getpass
+import logging
import optparse
-import sys
import re
-import logging
-import getpass
+import sys
+import urllib2
from datetime import datetime
from github import Github
from github import GithubException
-from atom.core import XmlElement
-
-import gdata.projecthosting.client
-import gdata.projecthosting.data
-import gdata.gauth
-import gdata.client
-import gdata.data
+from pyquery import PyQuery as pq
logging.basicConfig(level = logging.ERROR)
-# Patch gdata's CommentEntry Updates object to include the merged-into field
-
-class MergedIntoUpdate(XmlElement):
- _qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
-gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
-
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
-GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
-GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail?id=%d'
+GOOGLE_ISSUE_TEMPLATE = '_Original issue: {}_'
+GOOGLE_ISSUES_URL = 'https://code.google.com/p/{}/issues/csv?can=1&num={}&start={}&colspec=ID%20Type%20Status%20Owner%20Summary%20Opened%20Closed%20Reporter&sort=id'
+GOOGLE_URL = 'http://code.google.com/p/{}/issues/detail?id={}'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
-GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL_RE
-NUM_RE = re.compile('\s#(\d+)')
-ISSUE_RE = re.compile('[I|i]ssue\s(\d+)')
+GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL_RE)
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
- 'Type-Defect' : "bug",
- 'Type-Enhancement' : "enhancement"
+ 'Type-Defect' : 'bug',
+ 'Type-Enhancement' : 'enhancement'
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
- 'invalid': "invalid",
- 'duplicate': "duplicate",
- 'wontfix': "wontfix"
+ 'invalid': 'invalid',
+ 'duplicate': 'duplicate',
+ 'wontfix': 'wontfix'
}
-
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
-
-def mapissue(match):
- """Map a Google Code issue reference to the correct Github issue number """
- old = match.group(1)
- # TODO: map old issue to new issue
- # can't assume 1:1 mapping due to missing issues on GC & added issues on Github
- return 'issue #' +old
-
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
- if s is not None:
- s = re.sub(NUM_RE," # \g<1>", s) # escape things which look like Github issue refs
+ if s:
s = s.replace('%', '%') # Escape % signs
- s = re.sub(ISSUE_RE,mapissue, s) # convert Google Code issue refs to Github markup
return s
def github_label(name, color = "FFFFFF"):
-
""" Returns the Github label with the given name, creating it if necessary. """
- try: return label_cache[name]
+ try:
+ return label_cache[name]
except KeyError:
- try: return label_cache.setdefault(name, github_repo.get_label(name))
+ try:
+ return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
-def parse_gcode_id(id_text):
-
- """ Returns the numeric part of a Google Code ID string. """
-
- return int(re.search("\d+$", id_text).group(0))
-
-
def parse_gcode_date(date_text):
-
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
-
- """ Returns True if the given comment should be migrated to Github, otherwise False.
-
- A comment should be migrated if it represents a duplicate-merged-into update, or if
- it has a body that isn't the automated 'issue x has been merged into this issue'.
-
- """
-
- if comment.content.text:
- if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
- return False
- return True
- elif comment.updates.mergedIntoUpdate:
- return True
- return False
-
-
-def format_comment(comment):
-
- """ Returns the Github comment body for the given Google Code comment.
-
- Most comments are left unchanged, except to add a header identifying their original
- author and post-date. Google Code's merged-into comments, used to flag duplicate
- issues, are replaced with a little message linking to the parent issue.
-
- """
-
- author = comment.author[0].name.text
- date = parse_gcode_date(comment.published.text)
- content = escape(comment.content.text)
-
- if comment.updates.mergedIntoUpdate:
- return "_This issue is a duplicate of #%d_" % (options.base_id + int(comment.updates.mergedIntoUpdate.text))
- else: return "_From %s on %s_\n%s" % (author, date, content)
-
+ return '(No comment was entered for this change.)' not in comment
def add_issue_to_github(issue):
-
""" Migrates the given Google Code issue to Github. """
- gid = parse_gcode_id(issue.id.text)
- status = issue.status.text.lower() if issue.status else ""
- title = issue.title.text
- link = issue.link[1].href
- author = issue.author[0].name.text
- content = issue.content.text
- date = parse_gcode_date(issue.published.text)
-
- # Github takes issue with % in the title or body.
- title = title.replace('%', '%')
-
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
- raise Exception("Aborting to to impending Github API rate-limit cutoff.")
-
- # Build a list of labels to apply to the new issue, including an 'imported' tag that
- # we can use to identify this issue as one that's passed through migration.
+ raise Exception('Aborting to to impending Github API rate-limit cutoff.')
- labels = ["imported"]
+ body = issue['content'].replace('%', '%')
- # Convert Google Code labels to Github labels where possible
-
- if issue.label:
- for label in issue.label:
- if label.text.startswith("Priority-") and options.omit_priority:
- continue
- labels.append(LABEL_MAPPING.get(label.text, label.text))
-
- # Add additional labels based on the issue's state
-
- if status in STATE_MAPPING:
- labels.append(STATE_MAPPING[status])
-
- # Add the new Github issue with its labels and a header identifying it as migrated
-
- github_issue = None
-
- header = "_Original author: %s (%s)_" % (author, date)
- footer = GOOGLE_ISSUE_TEMPLATE % link
- body = "%s\n\n%s\n\n\n%s" % (header, content, footer)
- body = escape(body)
-
- output("Adding issue %d" % gid)
+ output('Adding issue %d' % issue['gid'])
if not options.dry_run:
- github_labels = [ github_label(label) for label in labels ]
- github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
+ github_labels = [github_label(label) for label in issue['labels']]
+ github_issue = github_repo.create_issue(issue['title'], body = body.encode('utf-8'), labels = github_labels)
# Assigns issues that originally had an owner to the current user
-
- if issue.owner and options.assign_owner:
+ if issue['owner'] and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
-def add_comments_to_issue(github_issue, gid):
-
+def add_comments_to_issue(github_issue, gcode_issue):
""" Migrates all comments from a Google Code issue to its Github copy. """
- start_index = 1
- max_results = GOOGLE_MAX_RESULTS
-
# Retrieve existing Github comments, to figure out which Google Code comments are new
+ existing_comments = [comment.body for comment in github_issue.get_comments()]
+
+ comments = [comment for comment in gcode_issue['comments']
+ if should_migrate_comment(comment)]
+
+ # Add any remaining comments to the Github issue
+ output(", adding comments")
+ for i, comment in enumerate(comments):
+ body = '_From {author} on {date}_\n\n{body}'.format(**comment)
+ if body in existing_issues:
+ logging.info('Skipping comment %d: already present', i + 1)
+ else:
+ logging.info('Adding comment %d', i + 1)
+ if not options.dry_run:
+ github_issue.create_comment(body.encode('utf-8'))
+ output('.')
+
+
+def get_gcode_issue(issue_summary):
+ # Populate properties available from the summary CSV
+ issue = {
+ 'gid': int(issue_summary['ID']),
+ 'title': issue_summary['Summary'].replace('%', '%'),
+ 'link': GOOGLE_URL.format(google_project_name, issue_summary['ID']),
+ 'author': issue_summary['Reporter'],
+ 'owner': issue_summary['Owner'],
+ 'state': 'closed' if issue_summary['Closed'] else 'open',
+ 'date': datetime.fromtimestamp(float(issue_summary['OpenedTimestamp'])),
+ 'status': issue_summary['Status'].lower()
+ }
- existing_comments = [ comment.body for comment in github_issue.get_comments() ]
-
- # Retain compatibility with earlier versions of migrateissues.py
-
- existing_comments = [ re.sub(r'^(.+):_\n', r'\1_\n', body) for body in existing_comments ]
-
- # Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
+ # Build a list of labels to apply to the new issue, including an 'imported' tag that
+ # we can use to identify this issue as one that's passed through migration.
+ labels = ['imported']
+ for label in issue_summary['AllLabels'].split(', '):
+ if label.startswith('Priority-') and options.omit_priority:
+ continue
+ labels.append(LABEL_MAPPING.get(label, label))
+ # Add additional labels based on the issue's state
+ if issue['status'] in STATE_MAPPING:
+ labels.append(STATE_MAPPING[issue['status']])
+
+ issue['labels'] = labels
+
+ # Scrape the issue details page for the issue body and comments
+ doc = pq(urllib2.urlopen(issue['link']).read())
+ issue['content'] = doc('.issuedescription .issuedescription pre').text()
+
+ # TODO date formatting
+ issue['content'] = '_From {author} on {date}_\n\n{content}\n\n{footer}'.format(
+ footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, issue['gid'])),
+ **issue)
+
+ issue['comments'] = []
+ for comment in doc('.issuecomment'):
+ comment = pq(comment)
+ if not comment('.date'):
+ continue # Sign in prompt line uses same class
+ issue['comments'].append({
+ 'date': comment('.date').attr('title'), # TODO: transform to better format
+ 'author': comment('.userlink').text(),
+ 'body': comment('pre').text()
+ })
+
+ return issue
+
+def get_gcode_issues():
+ count = 100
+ start_index = 0
+ issues = []
while True:
+ url = GOOGLE_ISSUES_URL.format(google_project_name, count, start_index)
+ issues.extend(row for row in csv.DictReader(urllib2.urlopen(url), dialect=csv.excel))
- query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
- comments_feed = google.get_comments(google_project_name, gid, query = query)
-
- # Filter out empty and otherwise unnecessary comments, unless they contain the
- # 'migrated into' update for a duplicate issue; we'll generate a special Github
- # comment for those.
-
- comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
-
- # Add any remaining comments to the Github issue
-
- if not comments:
- break
- if start_index == 1:
- output(", adding comments")
- for comment in comments:
- add_comment_to_github(comment, github_issue)
- output(".")
-
- start_index += max_results
-
-
-def add_comment_to_github(comment, github_issue):
-
- """ Adds a single Google Code comment to the given Github issue. """
-
- gid = parse_gcode_id(comment.id.text)
- body = format_comment(comment)
-
- logging.info("Adding comment %d", gid)
-
- if not options.dry_run:
- github_issue.create_comment(body.encode("utf-8"))
+ if issues and 'truncated' in issues[-1]['ID']:
+ issues.pop()
+ start_index += count
+ else:
+ return issues
def process_gcode_issues(existing_issues):
-
""" Migrates all Google Code issues in the given dictionary to Github. """
- start_index = 1
- previous_gid = 0
- max_results = GOOGLE_MAX_RESULTS
-
- while True:
-
- query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
- issues_feed = google.get_issues(google_project_name, query = query)
+ issues = get_gcode_issues()
+ previous_gid = 1
+
+ for issue in issues:
+ issue = get_gcode_issue(issue)
+
+ # If we're trying to do a complete migration to a fresh Github project,
+ # and want to keep the issue numbers synced with Google Code's, then we
+ # need to create dummy closed issues for deleted or missing Google Code
+ # issues.
+ if options.synchronize_ids:
+ for gid in xrange(previous_gid + 1, issue['gid']):
+ if gid in existing_issues:
+ continue
+
+ output('Creating dummy entry for missing issue %d\n' % gid)
+ title = 'Google Code skipped issue %d' % gid
+ body = '_Skipping this issue number to maintain synchronization with Google Code issue IDs._'
+ footer = GOOGLE_ISSUE_TEMPLATE.format(GOOGLE_URL.format(google_project_name, gid))
+ body += '\n\n' + footer
+ github_issue = github_repo.create_issue(title, body = body, labels = [github_label('imported')])
+ github_issue.edit(state = 'closed')
+ existing_issues[previous_gid] = github_issue
+ previous_gid = issue['gid']
+
+ # Add the issue and its comments to Github, if we haven't already
+ if issue['gid'] in existing_issues:
+ github_issue = existing_issues[issue['gid']]
+ output('Not adding issue %d (exists)' % issue['gid'])
+ else:
+ github_issue = add_issue_to_github(issue)
+
+ if github_issue:
+ add_comments_to_issue(github_issue, issue)
+ if github_issue.state != issue['state']:
+ github_issue.edit(state = issue['state'])
+ output('\n')
- if not issues_feed.entry:
- break
-
- for issue in issues_feed.entry:
-
- gid = parse_gcode_id(issue.id.text)
-
- # If we're trying to do a complete migration to a fresh Github project, and
- # want to keep the issue numbers synced with Google Code's, then we need to
- # watch out for the fact that deleted issues on Google Code leave holes in the ID numbering.
- # We'll work around this by adding dummy issues until the numbers match again.
-
- if options.synchronize_ids:
- while previous_gid + 1 < gid:
- previous_gid += 1
- output("Using dummy entry for missing issue %d\n" % (previous_gid ))
- title = "Google Code skipped issue %d" % (previous_gid )
- if previous_gid not in existing_issues:
- body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
- link = GOOGLE_URL % (google_project_name, previous_gid)
- footer = GOOGLE_ISSUE_TEMPLATE % link
- body += '\n\n' + footer
- github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
- github_issue.edit(state = "closed")
- existing_issues[previous_gid]=github_issue
-
-
- # Add the issue and its comments to Github, if we haven't already
-
- if gid in existing_issues:
- github_issue = existing_issues[gid]
- output("Not adding issue %d (exists)" % gid)
- else: github_issue = add_issue_to_github(issue)
-
- if github_issue:
- add_comments_to_issue(github_issue, gid)
- if github_issue.state != issue.state.text:
- github_issue.edit(state = issue.state.text)
- output("\n")
-
- previous_gid = gid
-
- start_index += max_results
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
- if id_match:
- google_id = int(id_match.group(1))
- issue_map[google_id] = issue
- labels = [l.name for l in issue.get_labels()]
- if not 'imported' in labels:
- # TODO we could fix up the label here instead of just warning
- logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
+ if not id_match:
+ continue
+
+ google_id = int(id_match.group(1))
+ issue_map[google_id] = issue
+ labels = [l.name for l in issue.get_labels()]
+ if not 'imported' in labels:
+ # TODO we could fix up the label here instead of just warning
+ logging.warn('Issue missing imported label %s- %r - %s', google_id, labels, issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
- logging.error( 'Failed to enumerate existing issues')
+ logging.error('Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
- logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limiting))
+ logging.info('Rate limit (remaining/total) %r', github.rate_limiting)
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
- #logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
-
-if __name__ == "__main__":
+ #logging.info('Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
+if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
- parser.add_option("-b", "--base-id", type = "int", action = "store", dest = "base_id", help = "Number of issues in Github before migration", default = 0)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
- label_cache = {} # Cache Github tags, to avoid unnecessary API requests
+ label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
-
- password_is_wrong = True
- while password_is_wrong:
+
+ while True:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
- password_is_wrong = False
- except GithubException, exception:
+ break
+ except BadCredentialsException:
print "Bad credentials, try again."
- google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
- try: github_owner = github.get_user(owner_name)
+ try:
+ github_owner = github.get_user(owner_name)
except GithubException:
- try: github_owner = github.get_organization(owner_name)
+ try:
+ github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
- else: github_owner = github_user
+ else:
+ github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
a86cb1569970353ac481a6f3cc644db38003d06f
|
Fix issue with empty comments
|
diff --git a/migrateissues.py b/migrateissues.py
index e19a307..8614bd5 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,413 +1,414 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail?id=%d'
GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL_RE
NUM_RE = re.compile('\s#(\d+)')
ISSUE_RE = re.compile('[I|i]ssue\s(\d+)')
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def mapissue(match):
"""Map a Google Code issue reference to the correct Github issue number """
old = match.group(1)
# TODO: map old issue to new issue
# can't assume 1:1 mapping due to missing issues on GC & added issues on Github
return 'issue #' +old
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
- s = re.sub(NUM_RE," # \g<1>", s) # escape things which look like Github issue refs
- s = s.replace('%', '%') # Escape % signs
- s = re.sub(ISSUE_RE,mapissue, s) # convert Google Code issue refs to Github markup
+ if s is not None:
+ s = re.sub(NUM_RE," # \g<1>", s) # escape things which look like Github issue refs
+ s = s.replace('%', '%') # Escape % signs
+ s = re.sub(ISSUE_RE,mapissue, s) # convert Google Code issue refs to Github markup
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = escape(comment.content.text)
if comment.updates.mergedIntoUpdate:
return "_This issue is a duplicate of #%d_" % (options.base_id + int(comment.updates.mergedIntoUpdate.text))
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower() if issue.status else ""
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
footer = GOOGLE_ISSUE_TEMPLATE % link
body = "%s\n\n%s\n\n\n%s" % (header, content, footer)
body = escape(body)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
# Retain compatibility with earlier versions of migrateissues.py
existing_comments = [ re.sub(r'^(.+):_\n', r'\1_\n', body) for body in existing_comments ]
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that deleted issues on Google Code leave holes in the ID numbering.
# We'll work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids:
while previous_gid + 1 < gid:
previous_gid += 1
output("Using dummy entry for missing issue %d\n" % (previous_gid ))
title = "Google Code skipped issue %d" % (previous_gid )
if previous_gid not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
link = GOOGLE_URL % (google_project_name, previous_gid)
footer = GOOGLE_ISSUE_TEMPLATE % link
body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
existing_issues[previous_gid]=github_issue
# Add the issue and its comments to Github, if we haven't already
if gid in existing_issues:
github_issue = existing_issues[gid]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if id_match:
google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error( 'Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limiting))
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
#logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-b", "--base-id", type = "int", action = "store", dest = "base_id", help = "Number of issues in Github before migration", default = 0)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
password_is_wrong = True
while password_is_wrong:
github_password = getpass.getpass("Github password: ")
try:
Github(github_user_name, github_password).get_user().login
password_is_wrong = False
except GithubException, exception:
print "Bad credentials, try again."
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
fdb9f44eeea695bf02189327f26c783b4badbd1c
|
AttributeError when issue.status is None
|
diff --git a/migrateissues.py b/migrateissues.py
index d527276..3aff14d 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,363 +1,363 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
if comment.updates.mergedIntoUpdate:
return "_This issue is a duplicate of #%d_" % (options.base_id + int(comment.updates.mergedIntoUpdate.text))
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
- status = issue.status.text.lower()
+ status = issue.status.text.lower() if issue.status else ""
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
# Retain compatibility with earlier versions of migrateissues.py
existing_comments = [ re.sub(r'^(.+):_\n', r'\1_\n', body) for body in existing_comments ]
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
output("Using dummy entry for missing issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-b", "--base-id", type = "int", action = "store", dest = "base_id", help = "Number of issues in Github before migration", default = 0)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
c9c922ad2c33b2fc8026c85c58c03fc70db8f0c4
|
Catching a bad credentials Exception.
|
diff --git a/migrateissues.py b/migrateissues.py
index d527276..eb1aee9 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,363 +1,371 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
if comment.updates.mergedIntoUpdate:
return "_This issue is a duplicate of #%d_" % (options.base_id + int(comment.updates.mergedIntoUpdate.text))
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
# Retain compatibility with earlier versions of migrateissues.py
existing_comments = [ re.sub(r'^(.+):_\n', r'\1_\n', body) for body in existing_comments ]
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
output("Using dummy entry for missing issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-b", "--base-id", type = "int", action = "store", dest = "base_id", help = "Number of issues in Github before migration", default = 0)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
- github_password = getpass.getpass("Github password: ")
+
+ password_is_wrong = True
+ while password_is_wrong:
+ github_password = getpass.getpass("Github password: ")
+ try:
+ Github(github_user_name, github_password).get_user().login
+ password_is_wrong = False
+ except GithubException, exception:
+ print "Bad credentials, try again."
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
0f1641cd97ef8f0b195b630eaab9330fd489b478
|
Finish up merge for changes on upstream
|
diff --git a/migrateissues.py b/migrateissues.py
index e3fe950..808a4ff 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,397 +1,405 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
+
GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
-GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
-GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL
+GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail?id=%d'
+GOOGLE_URL_RE = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
+GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL_RE
NUM_RE = re.compile('\s#(\d+)')
ISSUE_RE = re.compile('[I|i]ssue\s(\d+)')
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def mapissue(match):
"""Map a Google Code issue reference to the correct Github issue number """
old = match.group(1)
# TODO: map old issue to new issue
# can't assume 1:1 mapping due to missing issues on GC & added issues on Github
return 'issue #' +old
def escape(s):
"""Process text to convert markup and escape things which need escaping"""
s = re.sub(NUM_RE," # \g<1>", s) # escape things which look like Github issue refs
s = s.replace('%', '%') # Escape % signs
s = re.sub(ISSUE_RE,mapissue, s) # convert Google Code issue refs to Github markup
return s
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = escape(comment.content.text)
if comment.updates.mergedIntoUpdate:
return "_This issue is a duplicate of #%d_" % (options.base_id + int(comment.updates.mergedIntoUpdate.text))
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
footer = GOOGLE_ISSUE_TEMPLATE % link
body = "%s\n\n%s\n\n\n%s" % (header, content, footer)
body = escape(body)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
# Retain compatibility with earlier versions of migrateissues.py
existing_comments = [ re.sub(r'^(.+):_\n', r'\1_\n', body) for body in existing_comments ]
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
- # watch out for the fact that Google Code sometimes skips issue IDs. We'll
- # work around this by adding dummy issues until the numbers match again.
+ # watch out for the fact that deleted issues on Google Code leave holes in the ID numbering.
+ # We'll work around this by adding dummy issues until the numbers match again.
- if options.synchronize_ids and previous_gid + 1 < gid:
+ if options.synchronize_ids:
while previous_gid + 1 < gid:
- output("Using dummy entry for missing issue %d\n" % (previous_gid + 1))
- title = "Google Code skipped issue %d" % (previous_gid + 1)
- if title not in existing_issues:
+ previous_gid += 1
+ output("Using dummy entry for missing issue %d\n" % (previous_gid ))
+ title = "Google Code skipped issue %d" % (previous_gid )
+ if previous_gid not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
+ link = GOOGLE_URL % (google_project_name, previous_gid)
+ footer = GOOGLE_ISSUE_TEMPLATE % link
+ body += '\n\n' + footer
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
- previous_gid += 1
+ existing_issues[previous_gid]=github_issue
+
# Add the issue and its comments to Github, if we haven't already
- if gid in existing_issues.keys():
- github_issue = existing_issues[issue.title.text]
+ if gid in existing_issues:
+ github_issue = existing_issues[gid]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
log_rate_info()
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
- The result maps issue titles to their Github issue objects.
+ The result maps Google Code issue numbers to Github issue objects.
"""
output("Retrieving existing Github issues...\n")
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if id_match:
- google_id = id_match.group(1)
+ google_id = int(id_match.group(1))
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error( 'Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
- # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
- logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
+ logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limiting))
+ # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
+ #logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-b", "--base-id", type = "int", action = "store", dest = "base_id", help = "Number of issues in Github before migration", default = 0)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
log_rate_info()
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
d46003b8247c04a30c53f5174c20c74dd210a74d
|
Start of issue number mapping between old and new
|
diff --git a/migrateissues.py b/migrateissues.py
index 9337377..ad09e41 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,206 +1,211 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
import getpass
from github import Github, GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
#GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL
NUM_RE = re.compile('\s#(\d+)')
ISSUE_RE = re.compile('[I|i]ssue\s(\d+)')
logging.basicConfig(level=logging.INFO)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
+def mapissue(match):
+ old = match.group(1)
+ # TODO: map old issue to new issue
+ # can't assume 1:1 mapping due to missing issues on GC & added issues on Github
+ return 'issue #' +old
def escape(s):
s = re.sub(NUM_RE," # \g<1>", s) # escape things which look like Github issue refs
s = s.replace('%', '%') # Github chokes on % in the payload
- s = re.sub(ISSUE_RE,'issue #\g<1>', s) # convert Google Code issue refs
+ s = re.sub(ISSUE_RE,mapissue, s) # convert Google Code issue refs
return s
def add_issue_to_github(issue):
id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
header = '_Original author: %s (%s)_' % (author, date)
footer = GOOGLE_ISSUE_TEMPLATE % link
body = '%s\n\n%s\n\n\n%s' % (header, content, footer)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
body = escape(body)
output('Adding issue %s' % (id))
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body=body.encode('utf-8'))
github_issue.edit(state=issue.state.text)
try:
import_label = github_repo.get_label('imported')
except GithubException:
import_label = github_repo.create_label('imported', 'FFFFFF')
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
# github_issue.edit(state='closed')
else:
# don't actually open an issue during a dry run...
class blank:
def get_comments(self):
return []
github_issue = blank()
# Add any labels
label_mapping = {'Type-Defect' : 'bug', 'Type-Enhancement' : 'enhancement'}
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
# get github equivalent if it exists
label_text = label_mapping.get(label.text, label.text)
if not options.dry_run:
try:
github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, 'FFFFFF')
github_issue.add_to_labels(github_label)
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github_issue.get_comments()
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
content = escape(content)
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
id = parse_gcode_id(issue.id.text)
if id in existing_issues.keys():
github_issue = existing_issues[id]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
log_rate_info()
def get_existing_github_issues():
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if id_match:
google_id = id_match.group(1)
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error( 'Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass('Github password: ')
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
log_rate_info()
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
b6cd1637a46052db14cc9c00bb6017041e2d49d3
|
Add more complete escaping/conversion of body text
|
diff --git a/migrateissues.py b/migrateissues.py
index 2c34951..9337377 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,198 +1,206 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
import getpass
from github import Github, GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
#GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL
+NUM_RE = re.compile('\s#(\d+)')
+ISSUE_RE = re.compile('[I|i]ssue\s(\d+)')
logging.basicConfig(level=logging.INFO)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
+def escape(s):
+ s = re.sub(NUM_RE," # \g<1>", s) # escape things which look like Github issue refs
+ s = s.replace('%', '%') # Github chokes on % in the payload
+ s = re.sub(ISSUE_RE,'issue #\g<1>', s) # convert Google Code issue refs
+ return s
+
+
def add_issue_to_github(issue):
id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
header = '_Original author: %s (%s)_' % (author, date)
- # Note: the original issue template needs to match the regex
footer = GOOGLE_ISSUE_TEMPLATE % link
body = '%s\n\n%s\n\n\n%s' % (header, content, footer)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
- body = body.replace('%', '%')
+ body = escape(body)
output('Adding issue %s' % (id))
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body=body.encode('utf-8'))
github_issue.edit(state=issue.state.text)
try:
import_label = github_repo.get_label('imported')
except GithubException:
import_label = github_repo.create_label('imported', 'FFFFFF')
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
# github_issue.edit(state='closed')
else:
# don't actually open an issue during a dry run...
class blank:
def get_comments(self):
return []
github_issue = blank()
# Add any labels
label_mapping = {'Type-Defect' : 'bug', 'Type-Enhancement' : 'enhancement'}
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
# get github equivalent if it exists
label_text = label_mapping.get(label.text, label.text)
if not options.dry_run:
try:
github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, 'FFFFFF')
github_issue.add_to_labels(github_label)
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github_issue.get_comments()
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
- content = content.replace('%', '%') # Github chokes on % in the payload
+ content = escape(content)
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
id = parse_gcode_id(issue.id.text)
if id in existing_issues.keys():
github_issue = existing_issues[id]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
log_rate_info()
def get_existing_github_issues():
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if id_match:
google_id = id_match.group(1)
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error( 'Failed to enumerate existing issues')
raise
return issue_map
def log_rate_info():
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass('Github password: ')
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
log_rate_info()
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
log_rate_info()
process_gcode_issues(existing_issues)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
021d418a872df1f9bd4b9078cf8640f01e776a81
|
Centralize logging
|
diff --git a/migrateissues.py b/migrateissues.py
index c65750e..2c34951 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,195 +1,198 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
import getpass
from github import Github, GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
#GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL
logging.basicConfig(level=logging.INFO)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
def add_issue_to_github(issue):
id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
header = '_Original author: %s (%s)_' % (author, date)
# Note: the original issue template needs to match the regex
footer = GOOGLE_ISSUE_TEMPLATE % link
body = '%s\n\n%s\n\n\n%s' % (header, content, footer)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
body = body.replace('%', '%')
output('Adding issue %s' % (id))
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body=body.encode('utf-8'))
github_issue.edit(state=issue.state.text)
try:
import_label = github_repo.get_label('imported')
except GithubException:
import_label = github_repo.create_label('imported', 'FFFFFF')
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
# github_issue.edit(state='closed')
else:
# don't actually open an issue during a dry run...
class blank:
def get_comments(self):
return []
github_issue = blank()
# Add any labels
label_mapping = {'Type-Defect' : 'bug', 'Type-Enhancement' : 'enhancement'}
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
# get github equivalent if it exists
label_text = label_mapping.get(label.text, label.text)
if not options.dry_run:
try:
github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, 'FFFFFF')
github_issue.add_to_labels(github_label)
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github_issue.get_comments()
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
content = content.replace('%', '%') # Github chokes on % in the payload
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
id = parse_gcode_id(issue.id.text)
if id in existing_issues.keys():
github_issue = existing_issues[id]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
- logging.info( 'Rate limit (remaining/toal) %s',repr(github.rate_limit(refresh=True)))
+ log_rate_info()
def get_existing_github_issues():
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if id_match:
google_id = id_match.group(1)
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error( 'Failed to enumerate existing issues')
raise
return issue_map
-
+def log_rate_info():
+ # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
+ logging.info( 'Rate limit (remaining/total) %s',repr(github.rate_limit(refresh=True)))
+
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass('Github password: ')
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
+ log_rate_info()
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
- # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
- logging.info( 'Rate limit (remaining/toal) %s',repr(github.rate_limit(refresh=True)))
+ log_rate_info()
process_gcode_issues(existing_issues)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
7fb1b42b4ad38d67ec39a558e9a4fad49e85dfd0
|
Fix typo introduced by mistake
|
diff --git a/migrateissues.py b/migrateissues.py
index 4108bae..c65750e 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,195 +1,195 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
import getpass
from github import Github, GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
#GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL
logging.basicConfig(level=logging.INFO)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
def add_issue_to_github(issue):
id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
header = '_Original author: %s (%s)_' % (author, date)
# Note: the original issue template needs to match the regex
footer = GOOGLE_ISSUE_TEMPLATE % link
body = '%s\n\n%s\n\n\n%s' % (header, content, footer)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
body = body.replace('%', '%')
output('Adding issue %s' % (id))
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body=body.encode('utf-8'))
github_issue.edit(state=issue.state.text)
try:
import_label = github_repo.get_label('imported')
except GithubException:
import_label = github_repo.create_label('imported', 'FFFFFF')
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
# github_issue.edit(state='closed')
else:
# don't actually open an issue during a dry run...
class blank:
def get_comments(self):
return []
github_issue = blank()
# Add any labels
label_mapping = {'Type-Defect' : 'bug', 'Type-Enhancement' : 'enhancement'}
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
# get github equivalent if it exists
label_text = label_mapping.get(label.text, label.text)
if not options.dry_run:
try:
github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, 'FFFFFF')
github_issue.add_to_labels(github_label)
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github_issue.get_comments()
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
content = content.replace('%', '%') # Github chokes on % in the payload
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
id = parse_gcode_id(issue.id.text)
if id in existing_issues.keys():
github_issue = existing_issues[id]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
logging.info( 'Rate limit (remaining/toal) %s',repr(github.rate_limit(refresh=True)))
def get_existing_github_issues():
id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
issue_map = {}
for issue in existing_issues:
id_match = id_re.search(issue.body)
if id_match:
google_id = id_match.group(1)
issue_map[google_id] = issue
labels = [l.name for l in issue.get_labels()]
if not 'imported' in labels:
# TODO we could fix up the label here instead of just warning
logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
imported_count = len(issue_map)
logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error( 'Failed to enumerate existing issues')
raise
return issue_map
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass('Github password: ')
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
logging.info( 'Rate limit (remaining/toal) %s',repr(github.rate_limit(refresh=True)))
process_gcode_issues(existing_issues)
except:
- parser.printhelp()
+ parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
5ee6e283f30f12094e35c50e64c90ad4709e69fd
|
Key existing issues by ID instead of title
|
diff --git a/migrateissues.py b/migrateissues.py
index 7b9ac66..4108bae 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,183 +1,195 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
import getpass
from github import Github, GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
#GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
+GOOGLE_ISSUE_TEMPLATE = '_Original issue: %s_'
+GOOGLE_URL = 'http://code.google.com/p/%s/issues/detail\?id=(\d+)'
+GOOGLE_ID_RE = GOOGLE_ISSUE_TEMPLATE % GOOGLE_URL
logging.basicConfig(level=logging.INFO)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
def add_issue_to_github(issue):
id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
header = '_Original author: %s (%s)_' % (author, date)
- body = '%s\n\n%s\n\n\n_Original issue: %s_' % (header, content, link)
+ # Note: the original issue template needs to match the regex
+ footer = GOOGLE_ISSUE_TEMPLATE % link
+ body = '%s\n\n%s\n\n\n%s' % (header, content, footer)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
body = body.replace('%', '%')
output('Adding issue %s' % (id))
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body=body.encode('utf-8'))
github_issue.edit(state=issue.state.text)
try:
import_label = github_repo.get_label('imported')
except GithubException:
import_label = github_repo.create_label('imported', 'FFFFFF')
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
# github_issue.edit(state='closed')
else:
# don't actually open an issue during a dry run...
class blank:
def get_comments(self):
return []
github_issue = blank()
# Add any labels
label_mapping = {'Type-Defect' : 'bug', 'Type-Enhancement' : 'enhancement'}
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
# get github equivalent if it exists
label_text = label_mapping.get(label.text, label.text)
if not options.dry_run:
try:
github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, 'FFFFFF')
github_issue.add_to_labels(github_label)
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github_issue.get_comments()
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
content = content.replace('%', '%') # Github chokes on % in the payload
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
id = parse_gcode_id(issue.id.text)
- if issue.title.text in existing_issues.keys():
- github_issue = existing_issues[issue.title.text]
+ if id in existing_issues.keys():
+ github_issue = existing_issues[id]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
+ logging.info( 'Rate limit (remaining/toal) %s',repr(github.rate_limit(refresh=True)))
def get_existing_github_issues():
+ id_re = re.compile(GOOGLE_ID_RE % google_project_name)
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
existing_count = len(existing_issues)
- existing_issues = filter(lambda i: 'imported' in [l.name for l in i.get_labels()], existing_issues)
- imported_count = len(existing_issues)
- existing_issues = dict(zip([str(i.title) for i in existing_issues], existing_issues))
- unique_count = len(existing_issues)
- logging.info('Found %d Github issues, %d imported, %d unique titles',existing_count,imported_count,unique_count)
- if unique_count < imported_count:
- logging.warn('WARNING: %d duplicate issue titles',imported_count-unique_count)
+ issue_map = {}
+ for issue in existing_issues:
+ id_match = id_re.search(issue.body)
+ if id_match:
+ google_id = id_match.group(1)
+ issue_map[google_id] = issue
+ labels = [l.name for l in issue.get_labels()]
+ if not 'imported' in labels:
+ # TODO we could fix up the label here instead of just warning
+ logging.warn('Issue missing imported label %s- %s - %s',google_id,repr(labels),issue.title)
+ imported_count = len(issue_map)
+ logging.info('Found %d Github issues, %d imported',existing_count,imported_count)
except:
logging.error( 'Failed to enumerate existing issues')
raise
- return existing_issues
+ return issue_map
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass('Github password: ')
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
# Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
logging.info( 'Rate limit (remaining/toal) %s',repr(github.rate_limit(refresh=True)))
process_gcode_issues(existing_issues)
except:
- parser.print_help()
+ parser.printhelp()
raise
|
arthur-debert/google-code-issues-migrator
|
832eb5c2421dfb1cdb50e8eec5c5d5b843028014
|
Imporve error handling
|
diff --git a/migrateissues.py b/migrateissues.py
index 0ac7a05..7b9ac66 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,174 +1,183 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
import getpass
from github import Github, GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
#GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
-logging.basicConfig(level=logging.ERROR)
+logging.basicConfig(level=logging.INFO)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
def add_issue_to_github(issue):
id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
header = '_Original author: %s (%s)_' % (author, date)
body = '%s\n\n%s\n\n\n_Original issue: %s_' % (header, content, link)
# Github takes issue with % in the title or body.
title = title.replace('%', '%')
body = body.replace('%', '%')
output('Adding issue %s' % (id))
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body=body.encode('utf-8'))
github_issue.edit(state=issue.state.text)
try:
import_label = github_repo.get_label('imported')
except GithubException:
import_label = github_repo.create_label('imported', 'FFFFFF')
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
# github_issue.edit(state='closed')
else:
# don't actually open an issue during a dry run...
class blank:
def get_comments(self):
return []
github_issue = blank()
# Add any labels
label_mapping = {'Type-Defect' : 'bug', 'Type-Enhancement' : 'enhancement'}
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
# get github equivalent if it exists
label_text = label_mapping.get(label.text, label.text)
if not options.dry_run:
try:
github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, 'FFFFFF')
github_issue.add_to_labels(github_label)
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github_issue.get_comments()
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
content = content.replace('%', '%') # Github chokes on % in the payload
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if not options.dry_run:
github_issue.create_comment(body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
id = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues.keys():
github_issue = existing_issues[issue.title.text]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
def get_existing_github_issues():
try:
existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
+ existing_count = len(existing_issues)
existing_issues = filter(lambda i: 'imported' in [l.name for l in i.get_labels()], existing_issues)
+ imported_count = len(existing_issues)
existing_issues = dict(zip([str(i.title) for i in existing_issues], existing_issues))
+ unique_count = len(existing_issues)
+ logging.info('Found %d Github issues, %d imported, %d unique titles',existing_count,imported_count,unique_count)
+ if unique_count < imported_count:
+ logging.warn('WARNING: %d duplicate issue titles',imported_count-unique_count)
except:
- existing_issues = {}
+ logging.error( 'Failed to enumerate existing issues')
+ raise
return existing_issues
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass('Github password: ')
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
+ # Note: this requires extended version of PyGithub from tfmorris/PyGithub repo
+ logging.info( 'Rate limit (remaining/toal) %s',repr(github.rate_limit(refresh=True)))
process_gcode_issues(existing_issues)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
4832e1a96b2b5f1b4189715951159d323d3284be
|
2012.10.14 update incoporating dnschur's fork.
|
diff --git a/CHANGES.md b/CHANGES.md
index 74bc5bd..fc55713 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,50 +1,58 @@
This is a change-history and list of contributors to the script.
-## Even newer version by David Schnur ##
+## 2012.10.14 by [Joel Thornton](http://github.com/joelpt) ##
+
+ - Improved compatibility with respect to comments migrated by a previous version of
+ this script.
+
+
+## 2012.09.28 by [David Schnur](http://github.com/dnschur) ##
https://github.com/dnschnur/google-code-issues-migrator
### New Features ###
- Greatly optimized Github API usage, allowing the script to process several times as
many issues before reaching the API's hourly rate-limit.
- The script now tries to avoid hitting the Github API's rate-limit part-way through
processing an issue, to avoid leaving it in an incomplete state.
- Improved support of duplicate / merged issues, by detecting the 'merged into' update
and generating a Github comment pointing to the parent issue. The automatically-added
'issue x is a duplicate of this issue' comments are now filtered out, since Github
already shows a reference when the duplicate links back to the parent.
- Added migration of Google Code statuses like 'invalid', 'wontfix' and 'duplicate';
these now map to the matching Github tags.
- The script now accepts Github projects in the form user/project, where user can be an
organization or a different user from the one running the script. This still requires
that the user running the script have enough permissions on the repository, of course.
- Added an option to keep issue numbers in sync, by handling cases where Google Code
skipped an issue number.
- New issues are now marked closed after all comments have been added, to better mimic
the order of that update in most real-world cases.
- Added an option to automatically assign issues that have an owner in Google Code.
- Added an option to omit migration of Google Code Priority labels.
### Bug Fixes ###
- Comments containing percent-signs are no longer added repeatedly when the script is run
multiple times.
-## Newer version by Jake Biesinger ##
+
+## 2012.09.24 by [Jake Biesinger](http://github.com/jakebiesinger) ##
https://github.com/jakebiesinger/google-code-issues-migrator
- Switched to PyGithub in order to support the Github v3 API.
-## Original version by Arthur Debert (and many other contributors) ##
-https://github.com/arthur-debert/google-code-issues-migrator
+## Original version by [Arthur Debert](http://github.com/arthur-debert) (and many other contributors) ##
+
+http://github.com/arthur-debert/google-code-issues-migrator
\ No newline at end of file
|
arthur-debert/google-code-issues-migrator
|
e2717c7e3805d5862f9b6d08660259988605b19d
|
Tidy up readme.
|
diff --git a/README.md b/README.md
index 427e2c2..4bf0881 100644
--- a/README.md
+++ b/README.md
@@ -1,69 +1,60 @@
This is a simple script to migrate issues from Google Code to Github.
-For a full history of changes, including the many additions found in this fork, please
-consult the [change log](https://github.com/dnschnur/google-code-issues-migrator/blob/master/CHANGES.md).
+For a full history of changes, please
+consult the [change log](https://github.com/arthur-debert/google-code-issues-migrator/blob/master/CHANGES.md).
### How it works ###
The script iterates over the issues and comments in a Google Code repository, creating
matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the script, and lose
their original creation date. We try to mitigate this by adding a non-obtrusive header
to each issue and comment stating the original author and creation date.
- Attachments are lost, since Github doesn't support them in issues or comments.
Otherwise almost everything is preserved, including labels, issue state (open/closed),
issue status (invalid, wontfix, duplicate) and merged-into links for duplicate issues.
The script can be run repeatedly to migrate new issues and comments, without mucking up
what's already on Github.
### Required Python libraries ###
* [gdata](http://code.google.com/p/gdata-python-client/) -- `pip install gdata`
-* [PyGithub](https://github.com/jacquev6/PyGithub/) -- `pip install PyGithub`
-
-Note that you must use version 1.8.0 or greater of PyGithub. This is due to a limitation
-in Github's API that results in a 500 error if clients try to submit a string containing a
-percent-sign. Earlier versions of this script worked around the problem by escaping the
-symbol, but this caused further problems by making it more difficult to correlate original
-Google Code issues with their migrated copies.
-
-We removed the script's escaping once PyGithub introduced a work-around for the problem,
-which was first included in their 1.8.0 release.
+* [PyGithub](https://github.com/jacquev6/PyGithub/) -- `pip install PyGithub` -- v1.8.0+ required
### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
You will be prompted for your github password.
---assign-owner automatically assigns any issues that currently have an owner to your
-Github user (the one running the script), even if you weren't the origina lowner. This
+`--assign-owner` automatically assigns any issues that currently have an owner to your
+Github user (the one running the script), even if you weren't the original owner. This
is used to save a little time in cases where you do in fact own most issues.
---dry-run does as much as possible without actually adding anything to Github. It's
+`--dry-run` does as much as possible without actually adding anything to Github. It's
useful as a test, to turn up any errors or unexpected behaviors before you run the script,
irreversibly, on your real repository.
---omit-priorities skips migration of Google Code Priority labels, since many projects
+`--omit-priorities` skips migration of Google Code Priority labels, since many projects
don't actually use them, and would just remove them from Github anyway.
---synchronize-ids attempts to ensure that every Github issue gets the same ID as its
+`--synchronize-ids` attempts to ensure that every Github issue gets the same ID as its
original Google Code issue. Normally this happens anyway, but in some cases Google Code
skips issue numbers; this option fills the gaps with dummy issues to ensure that the next
real issue keeps the same numbering. This only works, of course, if the migration starts
with a fresh Github repistory.
|
arthur-debert/google-code-issues-migrator
|
9291ca85d7e932f3132349334b6cdf40befff5ad
|
Retain comment-matching compatibility with how earlier versions of migrateissues.py formatted comment bodies.
|
diff --git a/migrateissues.py b/migrateissues.py
index 339243d..d527276 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,359 +1,363 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
- """ Transforms a Google Code date into """
+ """ Transforms a Google Code date into a more human readable string. """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
if comment.updates.mergedIntoUpdate:
return "_This issue is a duplicate of #%d_" % (options.base_id + int(comment.updates.mergedIntoUpdate.text))
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
+ # Retain compatibility with earlier versions of migrateissues.py
+
+ existing_comments = [ re.sub(r'^(.+):_\n', r'\1_\n', body) for body in existing_comments ]
+
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
output("Using dummy entry for missing issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-b", "--base-id", type = "int", action = "store", dest = "base_id", help = "Number of issues in Github before migration", default = 0)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
1e850d17410476d517a0aa6c7e0077b06e98f593
|
Added an option to specify a base issue number.
|
diff --git a/migrateissues.py b/migrateissues.py
index bde24ed..339243d 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,358 +1,359 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
if comment.updates.mergedIntoUpdate:
- return "_This issue is a duplicate of #%s_" % comment.updates.mergedIntoUpdate.text
+ return "_This issue is a duplicate of #%d_" % (options.base_id + int(comment.updates.mergedIntoUpdate.text))
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
output("Using dummy entry for missing issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
- parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
- parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
+ parser.add_option("-b", "--base-id", type = "int", action = "store", dest = "base_id", help = "Number of issues in Github before migration", default = 0)
+ parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
+ parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
bacf1e201c2efeeca62026e5d952b5de7c27419f
|
Tweaked sizes of headings.
|
diff --git a/CHANGES.md b/CHANGES.md
index 4a4a202..74bc5bd 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,50 +1,50 @@
This is a change-history and list of contributors to the script.
-### Even newer version by David Schnur ###
+## Even newer version by David Schnur ##
https://github.com/dnschnur/google-code-issues-migrator
-#### New Features ####
+### New Features ###
- Greatly optimized Github API usage, allowing the script to process several times as
many issues before reaching the API's hourly rate-limit.
- The script now tries to avoid hitting the Github API's rate-limit part-way through
processing an issue, to avoid leaving it in an incomplete state.
- Improved support of duplicate / merged issues, by detecting the 'merged into' update
and generating a Github comment pointing to the parent issue. The automatically-added
'issue x is a duplicate of this issue' comments are now filtered out, since Github
already shows a reference when the duplicate links back to the parent.
- Added migration of Google Code statuses like 'invalid', 'wontfix' and 'duplicate';
these now map to the matching Github tags.
- The script now accepts Github projects in the form user/project, where user can be an
organization or a different user from the one running the script. This still requires
that the user running the script have enough permissions on the repository, of course.
- Added an option to keep issue numbers in sync, by handling cases where Google Code
skipped an issue number.
- New issues are now marked closed after all comments have been added, to better mimic
the order of that update in most real-world cases.
- Added an option to automatically assign issues that have an owner in Google Code.
- Added an option to omit migration of Google Code Priority labels.
-#### Bug Fixes ####
+### Bug Fixes ###
- Comments containing percent-signs are no longer added repeatedly when the script is run
multiple times.
-### Newer version by Jake Biesinger ###
+## Newer version by Jake Biesinger ##
https://github.com/jakebiesinger/google-code-issues-migrator
- Switched to PyGithub in order to support the Github v3 API.
-### Original version by Arthur Debert (and many other contributors) ###
+## Original version by Arthur Debert (and many other contributors) ##
https://github.com/arthur-debert/google-code-issues-migrator
diff --git a/README.md b/README.md
index 5059d7c..427e2c2 100644
--- a/README.md
+++ b/README.md
@@ -1,69 +1,69 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, including the many additions found in this fork, please
consult the [change log](https://github.com/dnschnur/google-code-issues-migrator/blob/master/CHANGES.md).
-## How it works ##
+### How it works ###
The script iterates over the issues and comments in a Google Code repository, creating
matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the script, and lose
their original creation date. We try to mitigate this by adding a non-obtrusive header
to each issue and comment stating the original author and creation date.
- Attachments are lost, since Github doesn't support them in issues or comments.
Otherwise almost everything is preserved, including labels, issue state (open/closed),
issue status (invalid, wontfix, duplicate) and merged-into links for duplicate issues.
The script can be run repeatedly to migrate new issues and comments, without mucking up
what's already on Github.
-## Required Python libraries ##
+### Required Python libraries ###
* [gdata](http://code.google.com/p/gdata-python-client/) -- `pip install gdata`
* [PyGithub](https://github.com/jacquev6/PyGithub/) -- `pip install PyGithub`
Note that you must use version 1.8.0 or greater of PyGithub. This is due to a limitation
in Github's API that results in a 500 error if clients try to submit a string containing a
percent-sign. Earlier versions of this script worked around the problem by escaping the
symbol, but this caused further problems by making it more difficult to correlate original
Google Code issues with their migrated copies.
We removed the script's escaping once PyGithub introduced a work-around for the problem,
which was first included in their 1.8.0 release.
-## Usage ##
+### Usage ###
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
You will be prompted for your github password.
--assign-owner automatically assigns any issues that currently have an owner to your
Github user (the one running the script), even if you weren't the origina lowner. This
is used to save a little time in cases where you do in fact own most issues.
--dry-run does as much as possible without actually adding anything to Github. It's
useful as a test, to turn up any errors or unexpected behaviors before you run the script,
irreversibly, on your real repository.
--omit-priorities skips migration of Google Code Priority labels, since many projects
don't actually use them, and would just remove them from Github anyway.
--synchronize-ids attempts to ensure that every Github issue gets the same ID as its
original Google Code issue. Normally this happens anyway, but in some cases Google Code
skips issue numbers; this option fills the gaps with dummy issues to ensure that the next
real issue keeps the same numbering. This only works, of course, if the migration starts
with a fresh Github repistory.
|
arthur-debert/google-code-issues-migrator
|
b895e6a74cee39f5de823e3320843a11a9f6f429
|
Correct URL pointing at CHANGES.md.
|
diff --git a/README.md b/README.md
index 567febb..5059d7c 100644
--- a/README.md
+++ b/README.md
@@ -1,69 +1,69 @@
This is a simple script to migrate issues from Google Code to Github.
For a full history of changes, including the many additions found in this fork, please
-consult the [change log](CHANGES.md).
+consult the [change log](https://github.com/dnschnur/google-code-issues-migrator/blob/master/CHANGES.md).
## How it works ##
The script iterates over the issues and comments in a Google Code repository, creating
matching issues and comments in Github. This has some limitations:
- All migrated issues and comments are authored by the user running the script, and lose
their original creation date. We try to mitigate this by adding a non-obtrusive header
to each issue and comment stating the original author and creation date.
- Attachments are lost, since Github doesn't support them in issues or comments.
Otherwise almost everything is preserved, including labels, issue state (open/closed),
issue status (invalid, wontfix, duplicate) and merged-into links for duplicate issues.
The script can be run repeatedly to migrate new issues and comments, without mucking up
what's already on Github.
## Required Python libraries ##
* [gdata](http://code.google.com/p/gdata-python-client/) -- `pip install gdata`
* [PyGithub](https://github.com/jacquev6/PyGithub/) -- `pip install PyGithub`
Note that you must use version 1.8.0 or greater of PyGithub. This is due to a limitation
in Github's API that results in a 500 error if clients try to submit a string containing a
percent-sign. Earlier versions of this script worked around the problem by escaping the
symbol, but this caused further problems by making it more difficult to correlate original
Google Code issues with their migrated copies.
We removed the script's escaping once PyGithub introduced a work-around for the problem,
which was first included in their 1.8.0 release.
## Usage ##
migrateissues.py [options] <google project name> <github username> <github project>
google_project_name The project name (from the URL) from google code
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help show this help message and exit
-a, --assign-owner Assign owned issues to the Github user
-d, --dry-run Don't modify anything on Github
-p, --omit-priority Don't migrate priority labels
-s, --synchronize-ids Ensure that migrated issues keep the same ID
You will be prompted for your github password.
--assign-owner automatically assigns any issues that currently have an owner to your
Github user (the one running the script), even if you weren't the origina lowner. This
is used to save a little time in cases where you do in fact own most issues.
--dry-run does as much as possible without actually adding anything to Github. It's
useful as a test, to turn up any errors or unexpected behaviors before you run the script,
irreversibly, on your real repository.
--omit-priorities skips migration of Google Code Priority labels, since many projects
don't actually use them, and would just remove them from Github anyway.
--synchronize-ids attempts to ensure that every Github issue gets the same ID as its
original Google Code issue. Normally this happens anyway, but in some cases Google Code
skips issue numbers; this option fills the gaps with dummy issues to ensure that the next
real issue keeps the same numbering. This only works, of course, if the migration starts
with a fresh Github repistory.
|
arthur-debert/google-code-issues-migrator
|
51ef0df83de9c3ba51650c290650c3b98b3cf861
|
Added a changelog to better document updates.
|
diff --git a/CHANGES.md b/CHANGES.md
new file mode 100644
index 0000000..4a4a202
--- /dev/null
+++ b/CHANGES.md
@@ -0,0 +1,50 @@
+This is a change-history and list of contributors to the script.
+
+### Even newer version by David Schnur ###
+
+https://github.com/dnschnur/google-code-issues-migrator
+
+#### New Features ####
+
+ - Greatly optimized Github API usage, allowing the script to process several times as
+ many issues before reaching the API's hourly rate-limit.
+
+ - The script now tries to avoid hitting the Github API's rate-limit part-way through
+ processing an issue, to avoid leaving it in an incomplete state.
+
+ - Improved support of duplicate / merged issues, by detecting the 'merged into' update
+ and generating a Github comment pointing to the parent issue. The automatically-added
+ 'issue x is a duplicate of this issue' comments are now filtered out, since Github
+ already shows a reference when the duplicate links back to the parent.
+
+ - Added migration of Google Code statuses like 'invalid', 'wontfix' and 'duplicate';
+ these now map to the matching Github tags.
+
+ - The script now accepts Github projects in the form user/project, where user can be an
+ organization or a different user from the one running the script. This still requires
+ that the user running the script have enough permissions on the repository, of course.
+
+ - Added an option to keep issue numbers in sync, by handling cases where Google Code
+ skipped an issue number.
+
+ - New issues are now marked closed after all comments have been added, to better mimic
+ the order of that update in most real-world cases.
+
+ - Added an option to automatically assign issues that have an owner in Google Code.
+
+ - Added an option to omit migration of Google Code Priority labels.
+
+#### Bug Fixes ####
+
+ - Comments containing percent-signs are no longer added repeatedly when the script is run
+ multiple times.
+
+### Newer version by Jake Biesinger ###
+
+https://github.com/jakebiesinger/google-code-issues-migrator
+
+ - Switched to PyGithub in order to support the Github v3 API.
+
+### Original version by Arthur Debert (and many other contributors) ###
+
+https://github.com/arthur-debert/google-code-issues-migrator
|
arthur-debert/google-code-issues-migrator
|
8196d04bb759e4dae980b9fde0906758226ed4d8
|
Fixed a minor typo.
|
diff --git a/migrateissues.py b/migrateissues.py
index aabc41f..bde24ed 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,358 +1,358 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
if comment.updates.mergedIntoUpdate:
return "_This issue is a duplicate of #%s_" % comment.updates.mergedIntoUpdate.text
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
output("Using dummy entry for missing issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
- parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
+ parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned issues to the Github user", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
661cd5fd2f227e2005484ea9cd913573642a2ce3
|
Slightly clearer dummy entry status message.
|
diff --git a/migrateissues.py b/migrateissues.py
index 7437bc1..aabc41f 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,358 +1,358 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# Patch gdata's CommentEntry Updates object to include the merged-into field
class MergedIntoUpdate(XmlElement):
_qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
""" Returns True if the given comment should be migrated to Github, otherwise False.
A comment should be migrated if it represents a duplicate-merged-into update, or if
it has a body that isn't the automated 'issue x has been merged into this issue'.
"""
if comment.content.text:
if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
return False
return True
elif comment.updates.mergedIntoUpdate:
return True
return False
def format_comment(comment):
""" Returns the Github comment body for the given Google Code comment.
Most comments are left unchanged, except to add a header identifying their original
author and post-date. Google Code's merged-into comments, used to flag duplicate
issues, are replaced with a little message linking to the parent issue.
"""
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
if comment.updates.mergedIntoUpdate:
return "_This issue is a duplicate of #%s_" % comment.updates.mergedIntoUpdate.text
else: return "_From %s on %s_\n%s" % (author, date, content)
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve existing Github comments, to figure out which Google Code comments are new
existing_comments = [ comment.body for comment in github_issue.get_comments() ]
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty and otherwise unnecessary comments, unless they contain the
# 'migrated into' update for a duplicate issue; we'll generate a special Github
# comment for those.
comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
- output("Adding dummy issue %d\n" % (previous_gid + 1))
+ output("Using dummy entry for missing issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
if github_issue.state != issue.state.text:
github_issue.edit(state = issue.state.text)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
4e057529152bbb2fb0453b357a767c726833ee6e
|
Fix infinite loop when re-adding dummy issues.
|
diff --git a/migrateissues.py b/migrateissues.py
index 29b5c59..45d7dbc 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,358 +1,358 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
# Only surface errors by default; per-comment progress is printed via output() instead.
logging.basicConfig(level = logging.ERROR)

# Patch gdata's CommentEntry Updates object to include the merged-into field.
# The stock gdata client does not parse <mergedIntoUpdate>, which Google Code
# emits when an issue is merged into another; registering this element lets us
# detect duplicate-merge updates on comments.
class MergedIntoUpdate(XmlElement):
    # Qualified XML name of the <mergedIntoUpdate> element in the issues feed
    _qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate

# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25

# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50

# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
    'Type-Defect' : "bug",
    'Type-Enhancement' : "enhancement"
}

# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
    'invalid': "invalid",
    'duplicate': "duplicate",
    'wontfix': "wontfix"
}
def output(string):
    """ Write *string* to stdout and flush right away, so progress is visible live. """
    stream = sys.stdout
    stream.write(string)
    stream.flush()
def github_label(name, color = "FFFFFF"):
    """ Return the Github label with the given name, creating it if necessary.

    Looks in the module-level label_cache first; on a miss, fetches the label
    from the repository, and if Github reports it missing, creates it with the
    given color.  The result is memoized in label_cache either way.
    """
    if name in label_cache:
        return label_cache[name]
    try:
        label = github_repo.get_label(name)
    except GithubException:
        # Label does not exist in the repository yet; create it.
        label = github_repo.create_label(name, color)
    return label_cache.setdefault(name, label)
def parse_gcode_id(id_text):
    """ Return the trailing run of digits in a Google Code ID string as an int. """
    match = re.search(r"\d+$", id_text)
    return int(match.group(0))
def parse_gcode_date(date_text):
    """ Convert a Google Code UTC timestamp into a human-readable date string.

    The feed formats dates like '2011-04-09T18:30:05.000Z'; the result looks
    like 'April 09, 2011 18:30:05'.
    """
    moment = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
    return moment.strftime("%B %d, %Y %H:%M:%S")
def should_migrate_comment(comment):
    """ Return True if the given comment should be migrated to Github.

    A comment is migrated when it has a real body that is not Google Code's
    automated 'Issue x has been merged into this issue.' notice, or when it is
    an empty-bodied duplicate-merged-into update (for which a special Github
    comment is generated elsewhere).
    """
    text = comment.content.text
    if text:
        # Skip the auto-generated merge notices; keep everything else with a body.
        return re.match(r"Issue (\d+) has been merged into this issue.", text) is None
    # No body: only worth migrating if it carries a merged-into update.
    return bool(comment.updates.mergedIntoUpdate)
def format_comment(comment):
    """ Return the Github comment body for the given Google Code comment.

    Ordinary comments keep their content, prefixed with a header naming the
    original author and post date.  Merged-into updates (Google Code's way of
    flagging duplicates) become a short note pointing at the parent issue.
    """
    # Pull everything out of the entry up front, mirroring the feed accesses.
    who = comment.author[0].name.text
    when = parse_gcode_date(comment.published.text)
    text = comment.content.text
    merged = comment.updates.mergedIntoUpdate
    if merged:
        return "_This issue is a duplicate of #%s_" % merged.text
    return "_From %s on %s_\n%s" % (who, when, text)
def add_issue_to_github(issue):
    """ Migrates the given Google Code issue to Github.

    Creates the Github issue with migrated labels, a header identifying the
    original author and date, and a link back to the original issue; closes it
    if the source issue was closed, and optionally assigns it to the current
    user.  Returns the created Github issue object, or None in dry-run mode.
    """
    gid = parse_gcode_id(issue.id.text)
    status = issue.status.text.lower()
    title = issue.title.text
    link = issue.link[1].href
    author = issue.author[0].name.text
    content = issue.content.text
    date = parse_gcode_date(issue.published.text)

    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        raise Exception("Aborting due to impending Github API rate-limit cutoff.")

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ["imported"]

    # Convert Google Code labels to Github labels where possible
    if issue.label:
        for label in issue.label:
            if label.text.startswith("Priority-") and options.omit_priority:
                continue
            labels.append(LABEL_MAPPING.get(label.text, label.text))

    # Add additional labels based on the issue's state
    if status in STATE_MAPPING:
        labels.append(STATE_MAPPING[status])

    # Add the new Github issue with its labels and a header identifying it as migrated
    github_issue = None
    header = "_Original author: %s (%s)_" % (author, date)
    body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
    output("Adding issue %d" % gid)
    if not options.dry_run:
        github_labels = [ github_label(label) for label in labels ]
        github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
        # Mirror the source issue's open/closed state
        if issue.state.text != "open":
            github_issue.edit(state = issue.state.text)

    # Assigns issues that originally had an owner to the current user
    if issue.owner and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)

    return github_issue
def add_comments_to_issue(github_issue, gid):
    """ Migrates all comments from a Google Code issue to its Github copy.

    Pages through the Google Code comments feed for issue *gid* and posts any
    comment not already present on *github_issue*.
    """
    start_index = 1
    max_results = GOOGLE_MAX_RESULTS

    # Retrieve existing Github comments, to figure out which Google Code comments are new
    existing_comments = [ comment.body for comment in github_issue.get_comments() ]

    # Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
    while True:
        query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
        comments_feed = google.get_comments(google_project_name, gid, query = query)

        # Filter out empty and otherwise unnecessary comments, unless they contain the
        # 'migrated into' update for a duplicate issue; we'll generate a special Github
        # comment for those.
        comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]

        # Add any remaining comments to the Github issue
        # NOTE(review): an all-duplicate page also stops pagination here, which
        # presumably relies on re-runs never having a gap of >1 page — confirm.
        if not comments:
            break
        if start_index == 1:
            output(", adding comments")
        for comment in comments:
            add_comment_to_github(comment, github_issue)
            output(".")

        start_index += max_results
def add_comment_to_github(comment, github_issue):
    """ Post a single Google Code comment onto the given Github issue. """
    cid = parse_gcode_id(comment.id.text)
    text = format_comment(comment)
    logging.info("Adding comment %d", cid)
    # Dry runs format and log the comment but never touch Github.
    if not options.dry_run:
        github_issue.create_comment(text.encode("utf-8"))
def process_gcode_issues(existing_issues):
    """ Migrates all Google Code issues in the given dictionary to Github.

    Pages through the Google Code issues feed, adding each issue and its
    comments to Github unless an issue with the same title was already
    migrated (per *existing_issues*, a title -> Github issue mapping).
    """
    start_index = 1
    previous_gid = 0
    max_results = GOOGLE_MAX_RESULTS

    while True:
        query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
        issues_feed = google.get_issues(google_project_name, query = query)
        if not issues_feed.entry:
            break
        for issue in issues_feed.entry:
            gid = parse_gcode_id(issue.id.text)

            # If we're trying to do a complete migration to a fresh Github project, and
            # want to keep the issue numbers synced with Google Code's, then we need to
            # watch out for the fact that Google Code sometimes skips issue IDs. We'll
            # work around this by adding dummy issues until the numbers match again.
            if options.synchronize_ids and previous_gid + 1 < gid:
                while previous_gid + 1 < gid:
                    output("Adding dummy issue %d\n" % (previous_gid + 1))
                    title = "Google Code skipped issue %d" % (previous_gid + 1)
                    if title not in existing_issues:
                        body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
                        github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
                        github_issue.edit(state = "closed")
                    # Advance unconditionally — incrementing only for newly-created
                    # dummies would loop forever when the dummy already exists.
                    previous_gid += 1

            # Add the issue and its comments to Github, if we haven't already
            if issue.title.text in existing_issues:
                github_issue = existing_issues[issue.title.text]
                output("Not adding issue %d (exists)" % gid)
            else:
                github_issue = add_issue_to_github(issue)
            if github_issue:
                add_comments_to_issue(github_issue, gid)
            output("\n")
            previous_gid = gid

        start_index += max_results
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps issue titles to their Github issue objects.  Only issues
    carrying the 'imported' label — i.e. ones this script created — are
    included.  On any API failure an empty mapping is returned (best-effort),
    which causes the caller to treat every issue as not-yet-migrated.
    """
    output("Retrieving existing Github issues...\n")
    try:
        open_issues = list(github_repo.get_issues(state = "open"))
        closed_issues = list(github_repo.get_issues(state = "closed"))
        issues = open_issues + closed_issues

        # We only care about issues marked as 'imported'; ones that we created
        output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
        existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
        return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
    except Exception:
        # Keep the best-effort contract, but don't swallow the failure silently:
        # an unnoticed error here would make us re-import every issue.
        logging.exception("Failed to retrieve existing Github issues; assuming none were migrated")
        return {}
if __name__ == "__main__":

    usage = "usage: %prog [options] <google project name> <github username> <github project>"
    description = "Migrate all issues from a Google Code project to a Github project."

    # Command-line options; the parsed 'options' object is read by the
    # migration functions as a module-level global.
    parser = optparse.OptionParser(usage = usage, description = description)
    parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
    parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
    parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
    parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)

    options, args = parser.parse_args()
    if len(args) != 3:
        parser.print_help()
        sys.exit()

    label_cache = {} # Cache Github tags, to avoid unnecessary API requests

    google_project_name, github_user_name, github_project = args
    github_password = getpass.getpass("Github password: ")

    # Clients for both services; Google Code's issue tracker needs no auth for reads.
    google = gdata.projecthosting.client.ProjectHostingClient()
    github = Github(github_user_name, github_password)
    github_user = github.get_user()

    # If the project name is specified as owner/project, assume that it's owned by either
    # a different user than the one we have credentials for, or an organization.
    if "/" in github_project:
        owner_name, github_project = github_project.split("/")
        try: github_owner = github.get_user(owner_name)
        except GithubException:
            try: github_owner = github.get_organization(owner_name)
            except GithubException:
                github_owner = github_user
    else: github_owner = github_user
    github_repo = github_owner.get_repo(github_project)

    # Run the migration; on failure, show usage before re-raising so the
    # traceback is preserved.
    try:
        existing_issues = get_existing_github_issues()
        process_gcode_issues(existing_issues)
    except Exception:
        parser.print_help()
        raise
|
arthur-debert/google-code-issues-migrator
|
2a4e1c914dca0e7eca053773df0bea6047f1425d
|
Added support for duplicate and merged issues.
|
diff --git a/migrateissues.py b/migrateissues.py
index 4fe4b6a..29b5c59 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,318 +1,358 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
+from atom.core import XmlElement
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
+# Patch gdata's CommentEntry Updates object to include the merged-into field
+
+class MergedIntoUpdate(XmlElement):
+ _qname = gdata.projecthosting.data.ISSUES_TEMPLATE % 'mergedIntoUpdate'
+gdata.projecthosting.data.Updates.mergedIntoUpdate = MergedIntoUpdate
+
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
+def should_migrate_comment(comment):
+
+ """ Returns True if the given comment should be migrated to Github, otherwise False.
+
+ A comment should be migrated if it represents a duplicate-merged-into update, or if
+ it has a body that isn't the automated 'issue x has been merged into this issue'.
+
+ """
+
+ if comment.content.text:
+ if re.match(r"Issue (\d+) has been merged into this issue.", comment.content.text):
+ return False
+ return True
+ elif comment.updates.mergedIntoUpdate:
+ return True
+ return False
+
+
+def format_comment(comment):
+
+ """ Returns the Github comment body for the given Google Code comment.
+
+ Most comments are left unchanged, except to add a header identifying their original
+ author and post-date. Google Code's merged-into comments, used to flag duplicate
+ issues, are replaced with a little message linking to the parent issue.
+
+ """
+
+ author = comment.author[0].name.text
+ date = parse_gcode_date(comment.published.text)
+ content = comment.content.text
+
+ if comment.updates.mergedIntoUpdate:
+ return "_This issue is a duplicate of #%s_" % comment.updates.mergedIntoUpdate.text
+ else: return "_From %s on %s_\n%s" % (author, date, content)
+
+
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
if issue.state.text != "open":
github_issue.edit(state = issue.state.text)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
+ # Retrieve existing Github comments, to figure out which Google Code comments are new
+
+ existing_comments = [ comment.body for comment in github_issue.get_comments() ]
+
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
- # Filter out empty comments
+ # Filter out empty and otherwise unnecessary comments, unless they contain the
+ # 'migrated into' update for a duplicate issue; we'll generate a special Github
+ # comment for those.
- comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
-
- # Filter out any comments that already exist in Github and are tagged as imported
-
- existing_comments = github_issue.get_comments()
- existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
- existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
- comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
+ comments = [ comment for comment in comments_feed.entry if should_migrate_comment(comment) and format_comment(comment) not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
+
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
- author = comment.author[0].name.text
- date = parse_gcode_date(comment.published.text)
- content = comment.content.text
-
- body = "_From %s on %s_\n%s" % (author, date, content)
+ body = format_comment(comment)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
output("Adding dummy issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
1cb904ada70774f8888e36cc7375c5c218b97f92
|
Fixed a string formatting error.
|
diff --git a/migrateissues.py b/migrateissues.py
index 96729a8..4fe4b6a 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,317 +1,318 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return int(re.search("\d+$", id_text).group(0))
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %d" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
if issue.state.text != "open":
github_issue.edit(state = issue.state.text)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gid):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gid, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = "_From %s on %s_\n%s" % (author, date, content)
logging.info("Adding comment %d", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
+
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
- output("Adding dummy issue %d" % previous_gid + 1)
+ output("Adding dummy issue %d\n" % (previous_gid + 1))
title = "Google Code skipped issue %d" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %d (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():

    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps issue titles to their Github issue objects.
    """

    output("Retrieving existing Github issues...\n")

    try:
        issues = list(github_repo.get_issues(state = "open")) + list(github_repo.get_issues(state = "closed"))

        # Only issues carrying the 'imported' label were created by this script.
        output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
        migrated = []
        for issue in issues:
            if "imported" in [ label.name for label in issue.get_labels() ]:
                migrated.append(issue)
        return dict(zip([ str(issue.title) for issue in migrated ], migrated))

    except Exception:
        # Best-effort: if the repository can't be queried, behave as if
        # nothing has been migrated yet.
        return {}
if __name__ == "__main__":

    usage = "usage: %prog [options] <google project name> <github username> <github project>"
    description = "Migrate all issues from a Google Code project to a Github project."

    parser = optparse.OptionParser(usage = usage, description = description)
    parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
    parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
    parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
    parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)

    options, args = parser.parse_args()
    if len(args) != 3:
        parser.print_help()
        sys.exit()

    label_cache = {}  # Cache Github tags, to avoid unnecessary API requests

    google_project_name, github_user_name, github_project = args
    github_password = getpass.getpass("Github password: ")

    google = gdata.projecthosting.client.ProjectHostingClient()
    github = Github(github_user_name, github_password)
    github_user = github.get_user()

    # If the project name is specified as owner/project, assume that it's owned by either
    # a different user than the one we have credentials for, or an organization.
    if "/" in github_project:
        owner_name, github_project = github_project.split("/")
        try:
            github_owner = github.get_user(owner_name)
        except GithubException:
            try:
                github_owner = github.get_organization(owner_name)
            except GithubException:
                github_owner = github_user
    else:
        github_owner = github_user
    github_repo = github_owner.get_repo(github_project)

    try:
        existing_issues = get_existing_github_issues()
        process_gcode_issues(existing_issues)
    except Exception:
        parser.print_help()
        raise
|
arthur-debert/google-code-issues-migrator
|
567dc06eda8172182510ab75f274cac0092e4d79
|
Interpret Google Code issue IDs as integers.
|
diff --git a/migrateissues.py b/migrateissues.py
index 490c093..96729a8 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,316 +1,317 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):

    """ Writes the given string to stdout immediately (no line buffering). """

    sys.stdout.write(string)
    sys.stdout.flush()
def github_label(name, color = "FFFFFF"):

    """ Returns the Github label with the given name, creating it if necessary.

    Results are memoized in the module-level `label_cache` so repeated lookups
    don't burn Github API requests.
    """

    if name in label_cache:
        return label_cache[name]
    try:
        label = github_repo.get_label(name)
    except GithubException:
        # Label doesn't exist yet on the repository; create it.
        label = github_repo.create_label(name, color)
    return label_cache.setdefault(name, label)
def parse_gcode_id(id_text):

    """ Returns the numeric part of a Google Code ID string, as an int.

    Google Code IDs are URL-like strings whose trailing digits are the
    issue/comment number.
    """

    # Raw string for the regex; anchor at the end to grab the trailing digits.
    return int(re.search(r"\d+$", id_text).group(0))
def parse_gcode_date(date_text):

    """ Transforms a Google Code UTC timestamp into a human-readable date string.

    Input format is e.g. "2011-03-05T14:30:15.000Z"; output is
    e.g. "March 05, 2011 14:30:15".
    """

    parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
    return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):

    """ Migrates the given Google Code issue to Github.

    Returns the new Github issue object, or None in dry-run mode.
    Raises if the remaining Github API rate-limit budget is too low to
    safely migrate a whole issue.
    """

    gid = parse_gcode_id(issue.id.text)
    status = issue.status.text.lower()
    title = issue.title.text
    link = issue.link[1].href
    author = issue.author[0].name.text
    content = issue.content.text
    date = parse_gcode_date(issue.published.text)

    # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
    # through adding an issue it could end up in an incomplete state. To avoid this we'll
    # ensure that there are enough requests remaining before we start migrating an issue.
    if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
        raise Exception("Aborting due to impending Github API rate-limit cutoff.")

    # Build a list of labels to apply to the new issue, including an 'imported' tag that
    # we can use to identify this issue as one that's passed through migration.
    labels = ["imported"]

    # Convert Google Code labels to Github labels where possible
    if issue.label:
        for label in issue.label:
            if label.text.startswith("Priority-") and options.omit_priority:
                continue
            labels.append(LABEL_MAPPING.get(label.text, label.text))

    # Add additional labels based on the issue's state
    if status in STATE_MAPPING:
        labels.append(STATE_MAPPING[status])

    # Add the new Github issue with its labels and a header identifying it as migrated
    github_issue = None
    header = "_Original author: %s (%s)_" % (author, date)
    body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
    output("Adding issue %d" % gid)
    if not options.dry_run:
        github_labels = [ github_label(label) for label in labels ]
        github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
        if issue.state.text != "open":
            github_issue.edit(state = issue.state.text)

    # Assigns issues that originally had an owner to the current user
    if issue.owner and options.assign_owner:
        assignee = github.get_user(github_user.login)
        if not options.dry_run:
            github_issue.edit(assignee = assignee)

    return github_issue
def add_comments_to_issue(github_issue, gid):

    """ Migrates all comments from a Google Code issue to its Github copy.

    Comments already present on the Github issue (identified by their
    "_From ..." header) are not duplicated.
    """

    start_index = 1
    max_results = GOOGLE_MAX_RESULTS

    # Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
    while True:

        query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
        comments_feed = google.get_comments(google_project_name, gid, query = query)

        # Filter out empty comments
        comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]

        # Filter out any comments that already exist in Github and are tagged as imported
        existing_comments = github_issue.get_comments()
        existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
        existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
        comments = [ comment for comment in comments if comment.content.text not in existing_comments ]

        # Add any remaining comments to the Github issue
        if not comments:
            break
        if start_index == 1:
            output(", adding comments")
        for comment in comments:
            add_comment_to_github(comment, github_issue)
            output(".")

        start_index += max_results
def add_comment_to_github(comment, github_issue):

    """ Adds a single Google Code comment to the given Github issue.

    The comment body is prefixed with a "_From <author> on <date>_" header so
    already-migrated comments can be recognized on later runs.
    """

    gid = parse_gcode_id(comment.id.text)
    author = comment.author[0].name.text
    date = parse_gcode_date(comment.published.text)
    content = comment.content.text

    body = "_From %s on %s_\n%s" % (author, date, content)

    logging.info("Adding comment %d", gid)
    if not options.dry_run:
        github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):

    """ Migrates all Google Code issues in the given dictionary to Github.

    `existing_issues` maps issue titles to already-migrated Github issue
    objects; those are skipped (their comments are still synced).
    """

    start_index = 1
    previous_gid = 0
    max_results = GOOGLE_MAX_RESULTS

    while True:

        query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
        issues_feed = google.get_issues(google_project_name, query = query)
        if not issues_feed.entry:
            break

        for issue in issues_feed.entry:

            gid = parse_gcode_id(issue.id.text)

            # If we're trying to do a complete migration to a fresh Github project, and
            # want to keep the issue numbers synced with Google Code's, then we need to
            # watch out for the fact that Google Code sometimes skips issue IDs. We'll
            # work around this by adding dummy issues until the numbers match again.
            if options.synchronize_ids and previous_gid + 1 < gid:
                while previous_gid + 1 < gid:
                    # BUG FIX: '%' binds tighter than '+', so the original
                    # "%d" % previous_gid + 1 tried to add 1 to a string
                    # (TypeError). Parenthesize the arithmetic.
                    output("Adding dummy issue %d" % (previous_gid + 1))
                    title = "Google Code skipped issue %d" % (previous_gid + 1)
                    if title not in existing_issues:
                        body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
                        github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
                        github_issue.edit(state = "closed")
                    previous_gid += 1

            # Add the issue and its comments to Github, if we haven't already
            if issue.title.text in existing_issues:
                github_issue = existing_issues[issue.title.text]
                output("Not adding issue %d (exists)" % gid)
            else:
                github_issue = add_issue_to_github(issue)

            if github_issue:
                add_comments_to_issue(github_issue, gid)
            output("\n")

            previous_gid = gid

        start_index += max_results
def get_existing_github_issues():

    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps issue titles to their Github issue objects.
    """

    output("Retrieving existing Github issues...\n")

    try:
        issues = list(github_repo.get_issues(state = "open")) + list(github_repo.get_issues(state = "closed"))

        # Only issues carrying the 'imported' label were created by this script.
        output("Retrieved %d issues; identifying ones already migrated...\n" % len(issues))
        migrated = []
        for issue in issues:
            if "imported" in [ label.name for label in issue.get_labels() ]:
                migrated.append(issue)
        return dict(zip([ str(issue.title) for issue in migrated ], migrated))

    except Exception:
        # Best-effort: if the repository can't be queried, behave as if
        # nothing has been migrated yet.
        return {}
if __name__ == "__main__":

    usage = "usage: %prog [options] <google project name> <github username> <github project>"
    description = "Migrate all issues from a Google Code project to a Github project."

    parser = optparse.OptionParser(usage = usage, description = description)
    parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
    parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
    parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
    parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)

    options, args = parser.parse_args()
    if len(args) != 3:
        parser.print_help()
        sys.exit()

    label_cache = {}  # Cache Github tags, to avoid unnecessary API requests

    google_project_name, github_user_name, github_project = args
    github_password = getpass.getpass("Github password: ")

    google = gdata.projecthosting.client.ProjectHostingClient()
    github = Github(github_user_name, github_password)
    github_user = github.get_user()

    # If the project name is specified as owner/project, assume that it's owned by either
    # a different user than the one we have credentials for, or an organization.
    if "/" in github_project:
        owner_name, github_project = github_project.split("/")
        try:
            github_owner = github.get_user(owner_name)
        except GithubException:
            try:
                github_owner = github.get_organization(owner_name)
            except GithubException:
                github_owner = github_user
    else:
        github_owner = github_user
    github_repo = github_owner.get_repo(github_project)

    try:
        existing_issues = get_existing_github_issues()
        process_gcode_issues(existing_issues)
    except Exception:
        parser.print_help()
        raise
|
arthur-debert/google-code-issues-migrator
|
e1263478c36a805a5311f06457b80f9879117da0
|
Remove unnecessary escaping of percent signs.
|
diff --git a/migrateissues.py b/migrateissues.py
index 7f7ae16..490c093 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,323 +1,316 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
-def github_escape(string):
-
- """ Returns a copy of the string sanitized for use in Github. """
-
- return string.replace("%", "%")
-
-
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):

    """ Returns the numeric part of a Google Code ID string (as a string). """

    match = re.search("\d+$", id_text)
    return match.group(0)
def parse_gcode_date(date_text):

    """ Transforms a Google Code date into a human-readable string. """

    # Parse the fixed-format UTC timestamp, then re-render it for display.
    when = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
    return when.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
- title = github_escape(issue.title.text)
+ title = issue.title.text
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
- body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
+ body = "%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link)
output("Adding issue %s" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
if issue.state.text != "open":
github_issue.edit(state = issue.state.text)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
- body = github_escape("_From %s on %s_\n%s" % (author, date, content))
+ body = "_From %s on %s_\n%s" % (author, date, content)
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
title = "Google Code skipped issue %s" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Identifying previously-migrated issues...\n")
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
# If the project name is specified as owner/project, assume that it's owned by either
# a different user than the one we have credentials for, or an organization.
if "/" in github_project:
owner_name, github_project = github_project.split("/")
try: github_owner = github.get_user(owner_name)
except GithubException:
try: github_owner = github.get_organization(owner_name)
except GithubException:
github_owner = github_user
else: github_owner = github_user
github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
50fea56edeb511f537c8ce9a12ce949caec94b9b
|
Accept project names in the form owner/project.
|
diff --git a/migrateissues.py b/migrateissues.py
index 8ddeea1..7f7ae16 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,310 +1,323 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
    """ Returns a copy of the string sanitized for use in Github. """
    # NOTE(review): replace("%", "%") substitutes "%" with itself — a no-op as
    # written. Presumably the intent was to escape percent signs (e.g. "%%");
    # confirm before relying on this for sanitization.
    return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
if issue.state.text != "open":
github_issue.edit(state = issue.state.text)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
# If we're trying to do a complete migration to a fresh Github project, and
# want to keep the issue numbers synced with Google Code's, then we need to
# watch out for the fact that Google Code sometimes skips issue IDs. We'll
# work around this by adding dummy issues until the numbers match again.
if options.synchronize_ids and previous_gid + 1 < gid:
while previous_gid + 1 < gid:
title = "Google Code skipped issue %s" % (previous_gid + 1)
if title not in existing_issues:
body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
github_issue.edit(state = "closed")
previous_gid += 1
# Add the issue and its comments to Github, if we haven't already
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
previous_gid = gid
start_index += max_results
def get_existing_github_issues():
    """ Returns a dictionary of Github issues previously migrated from Google Code.

    The result maps issue titles to their Github issue objects. Only issues
    carrying the 'imported' label (i.e. ones this script created) are included.
    On any retrieval failure an empty dictionary is returned so a fresh
    migration can proceed, but the error is logged rather than swallowed.
    """
    output("Retrieving existing Github issues...\n")
    try:
        open_issues = list(github_repo.get_issues(state = "open"))
        closed_issues = list(github_repo.get_issues(state = "closed"))
        issues = open_issues + closed_issues
        # We only care about issues marked as 'imported'; ones that we created
        output("Identifying previously-migrated issues...\n")
        existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
        return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
        # return { str(issue.title): issue for issue in existing_issues } Python 2.7+
    except Exception:
        # Deliberate best-effort fallback: treat any failure as "no existing
        # issues", but log it so auth or rate-limit problems aren't invisible.
        logging.exception("Failed to retrieve existing Github issues; assuming none exist")
        return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
- github_repo = github_user.get_repo(github_project)
+
+ # If the project name is specified as owner/project, assume that it's owned by either
+ # a different user than the one we have credentials for, or an organization.
+
+ if "/" in github_project:
+ owner_name, github_project = github_project.split("/")
+ try: github_owner = github.get_user(owner_name)
+ except GithubException:
+ try: github_owner = github.get_organization(owner_name)
+ except GithubException:
+ github_owner = github_user
+ else: github_owner = github_user
+
+ github_repo = github_owner.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
956fbcee2ee447702c9df9cf8b8e24a52e29532e
|
Added an option to synchronize issue numbers.
|
diff --git a/migrateissues.py b/migrateissues.py
index c56e7e5..8ddeea1 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,287 +1,310 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# The minimum number of remaining Github rate-limited API requests before we pre-emptively
# abort to avoid hitting the limit part-way through migrating an issue.
GITHUB_SPARE_REQUESTS = 50
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
# through adding an issue it could end up in an incomplete state. To avoid this we'll
# ensure that there are enough requests remaining before we start migrating an issue.
if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
raise Exception("Aborting to to impending Github API rate-limit cutoff.")
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
if issue.state.text != "open":
github_issue.edit(state = issue.state.text)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
+ previous_gid = 0
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
+
+ # If we're trying to do a complete migration to a fresh Github project, and
+ # want to keep the issue numbers synced with Google Code's, then we need to
+ # watch out for the fact that Google Code sometimes skips issue IDs. We'll
+ # work around this by adding dummy issues until the numbers match again.
+
+ if options.synchronize_ids and previous_gid + 1 < gid:
+ while previous_gid + 1 < gid:
+ title = "Google Code skipped issue %s" % (previous_gid + 1)
+ if title not in existing_issues:
+ body = "_Skipping this issue number to maintain synchronization with Google Code issue IDs._"
+ github_issue = github_repo.create_issue(title, body = body, labels = [github_label("imported")])
+ github_issue.edit(state = "closed")
+ previous_gid += 1
+
+ # Add the issue and its comments to Github, if we haven't already
+
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
+
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
+
+ previous_gid = gid
+
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Identifying previously-migrated issues...\n")
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
+ parser.add_option("-s", "--synchronize-ids", action = "store_true", dest = "synchronize_ids", help = "Ensure that migrated issues keep the same ID", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
59a5e377bd0a26771ff88c8b121333ed8fcbac5a
|
Pre-emptively abort when approaching rate-limit.
|
diff --git a/migrateissues.py b/migrateissues.py
index a883e02..c56e7e5 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,275 +1,287 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
+# The minimum number of remaining Github rate-limited API requests before we pre-emptively
+# abort to avoid hitting the limit part-way through migrating an issue.
+
+GITHUB_SPARE_REQUESTS = 50
+
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
+ # Github rate-limits API requests to 5000 per hour, and if we hit that limit part-way
+ # through adding an issue it could end up in an incomplete state. To avoid this we'll
+ # ensure that there are enough requests remaining before we start migrating an issue.
+
+ if github.rate_limiting[0] < GITHUB_SPARE_REQUESTS:
+ raise Exception("Aborting to to impending Github API rate-limit cutoff.")
+
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
if issue.state.text != "open":
github_issue.edit(state = issue.state.text)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
output("Retrieving existing Github issues...\n")
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
output("Identifying previously-migrated issues...\n")
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
a4527d816b14cb74bf611b19ab3ad5762faab24e
|
Extra logging when retrieving existing issues.
|
diff --git a/migrateissues.py b/migrateissues.py
index 4c34fc7..a883e02 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,272 +1,275 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return label_cache[name]
except KeyError:
try: return label_cache.setdefault(name, github_repo.get_label(name))
except GithubException:
return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
# Build a list of labels to apply to the new issue, including an 'imported' tag that
# we can use to identify this issue as one that's passed through migration.
labels = ["imported"]
# Convert Google Code labels to Github labels where possible
if issue.label:
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
labels.append(LABEL_MAPPING.get(label.text, label.text))
# Add additional labels based on the issue's state
if status in STATE_MAPPING:
labels.append(STATE_MAPPING[status])
# Add the new Github issue with its labels and a header identifying it as migrated
github_issue = None
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
if not options.dry_run:
github_labels = [ github_label(label) for label in labels ]
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
if issue.state.text != "open":
github_issue.edit(state = issue.state.text)
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner:
assignee = github.get_user(github_user.login)
if not options.dry_run:
github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
+ output("Retrieving existing Github issues...\n")
+
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
+ output("Identifying previously-migrated issues...\n")
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
label_cache = {} # Cache Github tags, to avoid unnecessary API requests
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
982e60fe73b37f73e22040d8baf8d81f6fd06071
|
Reduce API requests resulting from issue labels.
|
diff --git a/migrateissues.py b/migrateissues.py
index 8907c71..4c34fc7 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,266 +1,272 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
-# Mapping from Google Code issue labels to Github tags
+# Mapping from Google Code issue labels to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
-# Mapping from Google Code issue states to Github tags
+# Mapping from Google Code issue states to Github labels
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
- try: return github_repo.get_label(name)
- except GithubException:
- return github_repo.create_label(name, color)
+ try: return label_cache[name]
+ except KeyError:
+ try: return label_cache.setdefault(name, github_repo.get_label(name))
+ except GithubException:
+ return label_cache.setdefault(name, github_repo.create_label(name, color))
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
- header = "_Original author: %s (%s)_" % (author, date)
- body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
+ # Build a list of labels to apply to the new issue, including an 'imported' tag that
+ # we can use to identify this issue as one that's passed through migration.
- output("Adding issue %s" % gid)
+ labels = ["imported"]
- github_issue = None
+ # Convert Google Code labels to Github labels where possible
- if not options.dry_run:
+ if issue.label:
+ for label in issue.label:
+ if label.text.startswith("Priority-") and options.omit_priority:
+ continue
+ labels.append(LABEL_MAPPING.get(label.text, label.text))
- github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
- github_issue.edit(state = issue.state.text)
+ # Add additional labels based on the issue's state
- # Add an 'imported' tag so it's easy to identify issues that we created
+ if status in STATE_MAPPING:
+ labels.append(STATE_MAPPING[status])
- github_issue.add_to_labels(github_label("imported"))
+ # Add the new Github issue with its labels and a header identifying it as migrated
- # Add additional tags based on the issue's state
+ github_issue = None
- if status in STATE_MAPPING:
- github_issue.add_to_labels(github_label(status))
+ header = "_Original author: %s (%s)_" % (author, date)
+ body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
- # Assigns issues that originally had an owner to the current user
+ output("Adding issue %s" % gid)
- if issue.owner and options.assign_owner and not options.dry_run:
- github_issue.edit(assignee = github.get_user(github_user.login))
+ if not options.dry_run:
+ github_labels = [ github_label(label) for label in labels ]
+ github_issue = github_repo.create_issue(title, body = body.encode("utf-8"), labels = github_labels)
+ if issue.state.text != "open":
+ github_issue.edit(state = issue.state.text)
- # Convert Google Code labels to Github tags where possible
+ # Assigns issues that originally had an owner to the current user
- if issue.label:
- output(", adding labels")
- for label in issue.label:
- if label.text.startswith("Priority-") and options.omit_priority:
- continue
- label_text = LABEL_MAPPING.get(label.text, label.text)
- if not options.dry_run:
- github_issue.add_to_labels(github_label(label_text))
- output(".")
+ if issue.owner and options.assign_owner:
+ assignee = github.get_user(github_user.login)
+ if not options.dry_run:
+ github_issue.edit(assignee = assignee)
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
+ label_cache = {} # Cache Github tags, to avoid unnecessary API requests
+
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
af4b192bd9a89559d11c8be41000b721ef4a4140
|
Minor tweak to migrated comment header.
|
diff --git a/migrateissues.py b/migrateissues.py
index 75c30b6..8907c71 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,266 +1,266 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue labels to Github tags
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github tags
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return github_repo.get_label(name)
except GithubException:
return github_repo.create_label(name, color)
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
status = issue.status.text.lower()
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = issue.state.text)
# Add an 'imported' tag so it's easy to identify issues that we created
github_issue.add_to_labels(github_label("imported"))
# Add additional tags based on the issue's state
if status in STATE_MAPPING:
github_issue.add_to_labels(github_label(status))
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner and not options.dry_run:
github_issue.edit(assignee = github.get_user(github_user.login))
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
github_issue.add_to_labels(github_label(label_text))
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
- body = github_escape("_From %s on %s:_\n%s" % (author, date, content))
+ body = github_escape("_From %s on %s_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
f7bdd808504ad5e954938cd2824e702b9c2efa5b
|
Fixed status label migration.
|
diff --git a/migrateissues.py b/migrateissues.py
index 57a1382..75c30b6 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,266 +1,266 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue labels to Github tags
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github tags
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate",
'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return github_repo.get_label(name)
except GithubException:
return github_repo.create_label(name, color)
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
- state = issue.state.text
+ status = issue.status.text.lower()
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
- github_issue.edit(state = state)
+ github_issue.edit(state = issue.state.text)
# Add an 'imported' tag so it's easy to identify issues that we created
github_issue.add_to_labels(github_label("imported"))
# Add additional tags based on the issue's state
- if state in STATE_MAPPING:
- github_issue.add_to_labels(github_label(state))
+ if status in STATE_MAPPING:
+ github_issue.add_to_labels(github_label(status))
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner and not options.dry_run:
github_issue.edit(assignee = github.get_user(github_user.login))
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
github_issue.add_to_labels(github_label(label_text))
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s:_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
3b7716bffb41d676320cd94babfd3a53bf2a0e5c
|
Added won't-fix to the migrated state tags.
|
diff --git a/migrateissues.py b/migrateissues.py
index 6de5627..57a1382 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,265 +1,266 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue labels to Github tags
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github tags
STATE_MAPPING = {
'invalid': "invalid",
- 'duplicate': "duplicate"
+ 'duplicate': "duplicate",
+ 'wontfix': "wontfix"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return github_repo.get_label(name)
except GithubException:
return github_repo.create_label(name, color)
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
state = issue.state.text
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = state)
# Add an 'imported' tag so it's easy to identify issues that we created
github_issue.add_to_labels(github_label("imported"))
# Add additional tags based on the issue's state
if state in STATE_MAPPING:
github_issue.add_to_labels(github_label(state))
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner and not options.dry_run:
github_issue.edit(assignee = github.get_user(github_user.login))
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
if label.text.startswith("Priority-") and options.omit_priority:
continue
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
github_issue.add_to_labels(github_label(label_text))
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s:_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
06ed3b22f6787f2131d13690cb68b9ab5587528b
|
Added an option to omit priority labels.
|
diff --git a/migrateissues.py b/migrateissues.py
index fd5b65a..6de5627 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,262 +1,265 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue labels to Github tags
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github tags
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return github_repo.get_label(name)
except GithubException:
return github_repo.create_label(name, color)
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
state = issue.state.text
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = state)
# Add an 'imported' tag so it's easy to identify issues that we created
github_issue.add_to_labels(github_label("imported"))
# Add additional tags based on the issue's state
if state in STATE_MAPPING:
github_issue.add_to_labels(github_label(state))
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner and not options.dry_run:
github_issue.edit(assignee = github.get_user(github_user.login))
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
+ if label.text.startswith("Priority-") and options.omit_priority:
+ continue
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
github_issue.add_to_labels(github_label(label_text))
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = comment.content.text
body = github_escape("_From %s on %s:_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
+ parser.add_option("-p", "--omit-priority", action = "store_true", dest = "omit_priority", help = "Don't migrate priority labels", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
d38aea890a1450c588e3e5e2c47791c6d51bac51
|
Properly escape a comment's full body.
|
diff --git a/migrateissues.py b/migrateissues.py
index 50e45a7..fd5b65a 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,262 +1,262 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue labels to Github tags
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
# Mapping from Google Code issue states to Github tags
STATE_MAPPING = {
'invalid': "invalid",
'duplicate': "duplicate"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return github_repo.get_label(name)
except GithubException:
return github_repo.create_label(name, color)
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
state = issue.state.text
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = state)
# Add an 'imported' tag so it's easy to identify issues that we created
github_issue.add_to_labels(github_label("imported"))
# Add additional tags based on the issue's state
if state in STATE_MAPPING:
github_issue.add_to_labels(github_label(state))
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner and not options.dry_run:
github_issue.edit(assignee = github.get_user(github_user.login))
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
github_issue.add_to_labels(github_label(label_text))
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
- content = github_escape(comment.content.text)
+ content = comment.content.text
- body = "_From %s on %s:_\n%s" % (author, date, content)
+ body = github_escape("_From %s on %s:_\n%s" % (author, date, content))
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
3ed920c8542157cc898fa31b38d95b3c4c0ebfec
|
Added additional tags based on an issue's state.
|
diff --git a/migrateissues.py b/migrateissues.py
index e60011c..50e45a7 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,252 +1,262 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
-# Mapping from Google Code issue types to Github labels
+# Mapping from Google Code issue labels to Github tags
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
+# Mapping from Google Code issue states to Github tags
+
+STATE_MAPPING = {
+ 'invalid': "invalid",
+ 'duplicate': "duplicate"
+}
+
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def github_label(name, color = "FFFFFF"):
""" Returns the Github label with the given name, creating it if necessary. """
try: return github_repo.get_label(name)
except GithubException:
return github_repo.create_label(name, color)
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
+ state = issue.state.text
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
- github_issue.edit(state = issue.state.text)
+ github_issue.edit(state = state)
# Add an 'imported' tag so it's easy to identify issues that we created
- try: import_label = github_repo.get_label("imported")
- except GithubException:
- import_label = github_repo.create_label("imported", "FFFFFF")
github_issue.add_to_labels(github_label("imported"))
+ # Add additional tags based on the issue's state
+
+ if state in STATE_MAPPING:
+ github_issue.add_to_labels(github_label(state))
+
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner and not options.dry_run:
github_issue.edit(assignee = github.get_user(github_user.login))
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
github_issue.add_to_labels(github_label(label_text))
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = github_escape(comment.content.text)
body = "_From %s on %s:_\n%s" % (author, date, content)
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
1c90732ed3e1a3990258cc5624650e37d7b42f2f
|
Factored out Github label retrieval/creation.
|
diff --git a/migrateissues.py b/migrateissues.py
index 4c6386c..e60011c 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,246 +1,252 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue types to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
+def github_label(name, color = "FFFFFF"):
+
+ """ Returns the Github label with the given name, creating it if necessary. """
+
+ try: return github_repo.get_label(name)
+ except GithubException:
+ return github_repo.create_label(name, color)
+
+
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = issue.state.text)
# Add an 'imported' tag so it's easy to identify issues that we created
try: import_label = github_repo.get_label("imported")
except GithubException:
import_label = github_repo.create_label("imported", "FFFFFF")
- github_issue.add_to_labels(import_label)
+ github_issue.add_to_labels(github_label("imported"))
# Assigns issues that originally had an owner to the current user
if issue.owner and options.assign_owner and not options.dry_run:
github_issue.edit(assignee = github.get_user(github_user.login))
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
- try: github_label = github_repo.get_label(label_text)
- except GithubException:
- github_label = github_repo.create_label(label_text, "FFFFFF")
- github_issue.add_to_labels(github_label)
+ github_issue.add_to_labels(github_label(label_text))
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = github_escape(comment.content.text)
body = "_From %s on %s:_\n%s" % (author, date, content)
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_user = github.get_user()
github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
395ddd601c35418ce371111da1a4c4029657bd46
|
Added an option to auto-assign owned issues.
|
diff --git a/migrateissues.py b/migrateissues.py
index 887530a..4c6386c 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,238 +1,246 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue types to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = issue.state.text)
+ # Add an 'imported' tag so it's easy to identify issues that we created
+
try: import_label = github_repo.get_label("imported")
except GithubException:
import_label = github_repo.create_label("imported", "FFFFFF")
github_issue.add_to_labels(import_label)
+ # Assigns issues that originally had an owner to the current user
+
+ if issue.owner and options.assign_owner and not options.dry_run:
+ github_issue.edit(assignee = github.get_user(github_user.login))
+
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
try: github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, "FFFFFF")
github_issue.add_to_labels(github_label)
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
- output("\n")
-
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = github_escape(comment.content.text)
body = "_From %s on %s:_\n%s" % (author, date, content)
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
+ output("\n")
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
+ parser.add_option("-a", "--assign-owner", action = "store_true", dest = "assign_owner", help = "Assign owned tickets to the Github user", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
- github_repo = github.get_user().get_repo(github_project)
+ github_user = github.get_user()
+ github_repo = github_user.get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
5bd8f4958882095b1ef753d2ff2b65e3be962513
|
Removed dead code
|
diff --git a/migrateissues.py b/migrateissues.py
index 194691b..887530a 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,241 +1,238 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import getpass
from datetime import datetime
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue types to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
def parse_gcode_date(date_text):
""" Transforms a Google Code date into """
parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
return parsed.strftime("%B %d, %Y %H:%M:%S")
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = issue.state.text)
try: import_label = github_repo.get_label("imported")
except GithubException:
import_label = github_repo.create_label("imported", "FFFFFF")
github_issue.add_to_labels(import_label)
- #if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
- # github_issue.edit(state="closed")
-
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
try: github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, "FFFFFF")
github_issue.add_to_labels(github_label)
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
output("\n")
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = parse_gcode_date(comment.published.text)
content = github_escape(comment.content.text)
body = "_From %s on %s:_\n%s" % (author, date, content)
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
df68274c71f1dfa9c3c35374b0d9e2f5829bdd1c
|
Removed dependency on dateutil
|
diff --git a/migrateissues.py b/migrateissues.py
index b42fce0..194691b 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,232 +1,241 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
-import dateutil.parser
import getpass
+from datetime import datetime
+
from github import Github
from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
-logging.basicConfig(level=logging.ERROR)
+logging.basicConfig(level = logging.ERROR)
# The maximum number of records to retrieve from Google Code in a single request
GOOGLE_MAX_RESULTS = 25
# Mapping from Google Code issue types to Github labels
LABEL_MAPPING = {
'Type-Defect' : "bug",
'Type-Enhancement' : "enhancement"
}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def github_escape(string):
""" Returns a copy of the string sanitized for use in Github. """
return string.replace("%", "%")
def parse_gcode_id(id_text):
""" Returns the numeric part of a Google Code ID string. """
return re.search("\d+$", id_text).group(0)
+def parse_gcode_date(date_text):
+
+ """ Transforms a Google Code date into """
+
+ parsed = datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.000Z")
+ return parsed.strftime("%B %d, %Y %H:%M:%S")
+
+
def add_issue_to_github(issue):
""" Migrates the given Google Code issue to Github. """
gid = parse_gcode_id(issue.id.text)
title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
- date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
+ date = parse_gcode_date(issue.published.text)
header = "_Original author: %s (%s)_" % (author, date)
body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
github_issue.edit(state = issue.state.text)
try: import_label = github_repo.get_label("imported")
except GithubException:
import_label = github_repo.create_label("imported", "FFFFFF")
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
# github_issue.edit(state="closed")
# Convert Google Code labels to Github tags where possible
if issue.label:
output(", adding labels")
for label in issue.label:
label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
try: github_label = github_repo.get_label(label_text)
except GithubException:
github_label = github_repo.create_label(label_text, "FFFFFF")
github_issue.add_to_labels(github_label)
output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
""" Migrates all comments from a Google Code issue to its Github copy. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
# Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
# Filter out empty comments
comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
# Filter out any comments that already exist in Github and are tagged as imported
existing_comments = github_issue.get_comments()
existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
# Add any remaining comments to the Github issue
if not comments:
break
if start_index == 1:
output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
output(".")
start_index += max_results
output("\n")
def add_comment_to_github(comment, github_issue):
""" Adds a single Google Code comment to the given Github issue. """
gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
- date = dateutil.parser.parse(comment.published.text).strftime("%B %d, %Y %H:%M:%S")
+ date = parse_gcode_date(comment.published.text)
content = github_escape(comment.content.text)
body = "_From %s on %s:_\n%s" % (author, date, content)
logging.info("Adding comment %s", gid)
if not options.dry_run:
github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
""" Migrates all Google Code issues in the given dictionary to Github. """
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
issues_feed = google.get_issues(google_project_name, query = query)
if not issues_feed.entry:
break
for issue in issues_feed.entry:
gid = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
output("Not adding issue %s (exists)" % gid)
else: github_issue = add_issue_to_github(issue)
if github_issue:
add_comments_to_issue(github_issue, gid)
start_index += max_results
def get_existing_github_issues():
""" Returns a dictionary of Github issues previously migrated from Google Code.
The result maps issue titles to their Github issue objects.
"""
try:
open_issues = list(github_repo.get_issues(state = "open"))
closed_issues = list(github_repo.get_issues(state = "closed"))
issues = open_issues + closed_issues
# We only care about issues marked as 'imported'; ones that we created
existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
# return { str(issue.title): issue for issue in existing_issues } Python 2.7+
except Exception:
return {}
if __name__ == "__main__":
usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage = usage, description = description)
parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
0f4d7076a72e6348440aa6fc2a29625382f5d808
|
Code cleanup and documentation
|
diff --git a/migrateissues.py b/migrateissues.py
index 0ac7a05..b42fce0 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,174 +1,232 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
import getpass
-from github import Github, GithubException
+from github import Github
+from github import GithubException
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
-#GITHUB_REQUESTS_PER_SECOND = 0.5
+logging.basicConfig(level=logging.ERROR)
+
+# The maximum number of records to retrieve from Google Code in a single request
+
GOOGLE_MAX_RESULTS = 25
-logging.basicConfig(level=logging.ERROR)
+# Mapping from Google Code issue types to Github labels
+
+LABEL_MAPPING = {
+ 'Type-Defect' : "bug",
+ 'Type-Enhancement' : "enhancement"
+}
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
+def github_escape(string):
+
+ """ Returns a copy of the string sanitized for use in Github. """
+
+ return string.replace("%", "%")
+
+
def parse_gcode_id(id_text):
- return re.search('\d+$', id_text).group(0)
+
+ """ Returns the numeric part of a Google Code ID string. """
+
+ return re.search("\d+$", id_text).group(0)
def add_issue_to_github(issue):
- id = parse_gcode_id(issue.id.text)
- title = issue.title.text
+
+ """ Migrates the given Google Code issue to Github. """
+
+ gid = parse_gcode_id(issue.id.text)
+ title = github_escape(issue.title.text)
link = issue.link[1].href
author = issue.author[0].name.text
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
- header = '_Original author: %s (%s)_' % (author, date)
- body = '%s\n\n%s\n\n\n_Original issue: %s_' % (header, content, link)
- # Github takes issue with % in the title or body.
- title = title.replace('%', '%')
- body = body.replace('%', '%')
+ header = "_Original author: %s (%s)_" % (author, date)
+ body = github_escape("%s\n\n%s\n\n\n_Original issue: %s_" % (header, content, link))
- output('Adding issue %s' % (id))
+ output("Adding issue %s" % gid)
github_issue = None
if not options.dry_run:
- github_issue = github_repo.create_issue(title, body=body.encode('utf-8'))
- github_issue.edit(state=issue.state.text)
- try:
- import_label = github_repo.get_label('imported')
+
+ github_issue = github_repo.create_issue(title, body = body.encode("utf-8"))
+ github_issue.edit(state = issue.state.text)
+
+ try: import_label = github_repo.get_label("imported")
except GithubException:
- import_label = github_repo.create_label('imported', 'FFFFFF')
+ import_label = github_repo.create_label("imported", "FFFFFF")
github_issue.add_to_labels(import_label)
#if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
- # github_issue.edit(state='closed')
- else:
- # don't actually open an issue during a dry run...
- class blank:
- def get_comments(self):
- return []
- github_issue = blank()
-
- # Add any labels
- label_mapping = {'Type-Defect' : 'bug', 'Type-Enhancement' : 'enhancement'}
- if len(issue.label) > 0:
- output(', adding labels')
+ # github_issue.edit(state="closed")
+
+ # Convert Google Code labels to Github tags where possible
+
+ if issue.label:
+ output(", adding labels")
for label in issue.label:
- # get github equivalent if it exists
- label_text = label_mapping.get(label.text, label.text)
+ label_text = LABEL_MAPPING.get(label.text, label.text)
if not options.dry_run:
- try:
- github_label = github_repo.get_label(label_text)
+ try: github_label = github_repo.get_label(label_text)
except GithubException:
- github_label = github_repo.create_label(label_text, 'FFFFFF')
+ github_label = github_repo.create_label(label_text, "FFFFFF")
github_issue.add_to_labels(github_label)
- output('.')
+ output(".")
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
- # Add any comments
+
+ """ Migrates all comments from a Google Code issue to its Github copy. """
+
start_index = 1
max_results = GOOGLE_MAX_RESULTS
+
+ # Retrieve comments in blocks of GOOGLE_MAX_RESULTS until there are none left
+
while True:
- comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
- comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
+
+ query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
+ comments_feed = google.get_comments(google_project_name, gcode_issue_id, query = query)
+
+ # Filter out empty comments
+
+ comments = [ comment for comment in comments_feed.entry if comment.content.text is not None ]
+
+ # Filter out any comments that already exist in Github and are tagged as imported
+
existing_comments = github_issue.get_comments()
- existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
- existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
- comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
- if len(comments) == 0:
+ existing_comments = [ comment for comment in existing_comments if comment.body[0:5] == "_From" ]
+ existing_comments = [ re.sub(r"^_From.+_\n", "", comment.body) for comment in existing_comments ]
+ comments = [ comment for comment in comments if comment.content.text not in existing_comments ]
+
+ # Add any remaining comments to the Github issue
+
+ if not comments:
break
if start_index == 1:
- output(', adding comments')
+ output(", adding comments")
for comment in comments:
add_comment_to_github(comment, github_issue)
- output('.')
+ output(".")
start_index += max_results
- output('\n')
+
+ output("\n")
def add_comment_to_github(comment, github_issue):
- id = parse_gcode_id(comment.id.text)
+
+ """ Adds a single Google Code comment to the given Github issue. """
+
+ gid = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
- date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
- content = comment.content.text
- content = content.replace('%', '%') # Github chokes on % in the payload
- body = '_From %s on %s:_\n%s' % (author, date, content)
+ date = dateutil.parser.parse(comment.published.text).strftime("%B %d, %Y %H:%M:%S")
+ content = github_escape(comment.content.text)
+
+ body = "_From %s on %s:_\n%s" % (author, date, content)
- logging.info('Adding comment %s', id)
+ logging.info("Adding comment %s", gid)
if not options.dry_run:
- github_issue.create_comment(body.encode('utf-8'))
+ github_issue.create_comment(body.encode("utf-8"))
def process_gcode_issues(existing_issues):
+
+ """ Migrates all Google Code issues in the given dictionary to Github. """
+
start_index = 1
max_results = GOOGLE_MAX_RESULTS
+
while True:
- issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
- if len(issues_feed.entry) == 0:
+
+ query = gdata.projecthosting.client.Query(start_index = start_index, max_results = max_results)
+ issues_feed = google.get_issues(google_project_name, query = query)
+
+ if not issues_feed.entry:
break
+
for issue in issues_feed.entry:
- id = parse_gcode_id(issue.id.text)
- if issue.title.text in existing_issues.keys():
+ gid = parse_gcode_id(issue.id.text)
+ if issue.title.text in existing_issues:
github_issue = existing_issues[issue.title.text]
- output('Not adding issue %s (exists)' % (id))
- else:
- github_issue = add_issue_to_github(issue)
- add_comments_to_issue(github_issue, id)
+ output("Not adding issue %s (exists)" % gid)
+ else: github_issue = add_issue_to_github(issue)
+ if github_issue:
+ add_comments_to_issue(github_issue, gid)
start_index += max_results
def get_existing_github_issues():
+
+ """ Returns a dictionary of Github issues previously migrated from Google Code.
+
+ The result maps issue titles to their Github issue objects.
+
+ """
+
try:
- existing_issues = list(github_repo.get_issues(state='open')) + list(github_repo.get_issues(state='closed'))
- existing_issues = filter(lambda i: 'imported' in [l.name for l in i.get_labels()], existing_issues)
- existing_issues = dict(zip([str(i.title) for i in existing_issues], existing_issues))
- except:
- existing_issues = {}
- return existing_issues
+
+ open_issues = list(github_repo.get_issues(state = "open"))
+ closed_issues = list(github_repo.get_issues(state = "closed"))
+ issues = open_issues + closed_issues
+
+ # We only care about issues marked as 'imported'; ones that we created
+
+ existing_issues = [ issue for issue in issues if "imported" in [ label.name for label in issue.get_labels() ] ]
+ return dict(zip([ str(issue.title) for issue in existing_issues ], existing_issues))
+ # return { str(issue.title): issue for issue in existing_issues } Python 2.7+
+
+ except Exception:
+ return {}
+
if __name__ == "__main__":
- usage = "usage: %prog [options] <google_project_name> <github_user_name> <github_project>"
+
+ usage = "usage: %prog [options] <google project name> <github username> <github project>"
description = "Migrate all issues from a Google Code project to a Github project."
- parser = optparse.OptionParser(usage=usage, description=description)
- parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
+ parser = optparse.OptionParser(usage = usage, description = description)
+
+ parser.add_option("-d", "--dry-run", action = "store_true", dest = "dry_run", help = "Don't modify anything on Github", default = False)
+
options, args = parser.parse_args()
if len(args) != 3:
parser.print_help()
sys.exit()
google_project_name, github_user_name, github_project = args
- github_password = getpass.getpass('Github password: ')
+ github_password = getpass.getpass("Github password: ")
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(github_user_name, github_password)
github_repo = github.get_user().get_repo(github_project)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
- except:
+ except Exception:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
a04232c9d0ad4503b990381afa7be0647530f2a8
|
Revise sync comment in README.md
|
diff --git a/README.md b/README.md
index 400121b..8b650f0 100644
--- a/README.md
+++ b/README.md
@@ -1,23 +1,23 @@
This is a simple script to move issues from google code to github.
Some liberties have been taken (as we cannot, for example, know which google user corresponds to other user on github). But most information is complete.
-This script can be run repeatedly and will just pull in new issues and new comments without mucking up what's already on github.
+This script can be run repeatedly and will just pull in new issues and new comments from Google Code without mucking up what's already on github.
Required Python libraries:
* [gdata](http://code.google.com/p/gdata-python-client/) -- `pip install gdata`
* [python github](http://github.com/ask/python-github2) -- `pip install github2`
Usage:
migrate-issues.py [options] <google_project_name> <github_api_token> <github_user_name> <github_project>
google_project_name The project name (from the URL) from google code
github_api_token Your Github api token
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help show this help message and exit
-d, --dry-run don't modify anything on Github
|
arthur-debert/google-code-issues-migrator
|
b169f7b5029c858441d995726a9396bcb4e05842
|
Mention sync ability in README.md
|
diff --git a/README.md b/README.md
index d0299f5..400121b 100644
--- a/README.md
+++ b/README.md
@@ -1,21 +1,23 @@
This is a simple script to move issues from google code to github.
Some liberties have been taken (as we cannot, for example, know which google user corresponds to other user on github). But most information is complete.
+This script can be run repeatedly and will just pull in new issues and new comments without mucking up what's already on github.
+
Required Python libraries:
* [gdata](http://code.google.com/p/gdata-python-client/) -- `pip install gdata`
* [python github](http://github.com/ask/python-github2) -- `pip install github2`
Usage:
migrate-issues.py [options] <google_project_name> <github_api_token> <github_user_name> <github_project>
google_project_name The project name (from the URL) from google code
github_api_token Your Github api token
github_user_name The Github username
github_project The Github project name, e.g. username/project
Options:
-h, --help show this help message and exit
-d, --dry-run don't modify anything on Github
|
arthur-debert/google-code-issues-migrator
|
99db59417e4a21a99688e7cd1330b0255333404a
|
Fixed some already-migrated issues getting migrated again on subsequent runs.
|
diff --git a/migrate-issues.py b/migrate-issues.py
index 4e53a54..c1396a6 100644
--- a/migrate-issues.py
+++ b/migrate-issues.py
@@ -1,144 +1,145 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
from github2.client import Github
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
logging.basicConfig(level=logging.ERROR)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
def add_issue_to_github(issue):
id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
body = '%s\n\n\n_Original issue: %s (%s)_' % (content, link, date)
output('Adding issue %s' % (id))
github_issue = None
if not options.dry_run:
github_issue = github.issues.open(github_project, title=title, body=body.encode('utf-8'))
github.issues.add_label(github_project, github_issue.number, "imported")
if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
github.issues.close(github_project, github_issue.number)
# Add any labels
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
if not options.dry_run:
github.issues.add_label(github_project, github_issue.number, label.text.encode('utf-8'))
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github.issues.comments(github_project, github_issue.number)
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if not options.dry_run:
github.issues.comment(github_project, github_issue.number, body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
id = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues.keys():
github_issue = existing_issues[issue.title.text]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
def get_existing_github_issues():
try:
- existing_issues = github.issues.list_by_label(github_project, "imported") # only include github issues labeled "imported" in our duplicate checking
+ existing_issues = github.issues.list(github_project, 'open') + github.issues.list(github_project, 'closed')
+ existing_issues = filter(lambda i: 'imported' in i.labels, existing_issues)
existing_issues = dict(zip([str(i.title) for i in existing_issues], existing_issues))
except:
existing_issues = {}
return existing_issues
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_api_token> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 4:
parser.print_help()
sys.exit()
google_project_name, github_api_token, github_user_name, github_project = args
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(username=github_user_name, api_token=github_api_token, requests_per_second=GITHUB_REQUESTS_PER_SECOND)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
966daf1108ba42fdf55528559c31d50bfd8042f2
|
Tidying up
|
diff --git a/migrate-issues.py b/migrate-issues.py
index 0892434..4e53a54 100644
--- a/migrate-issues.py
+++ b/migrate-issues.py
@@ -1,144 +1,144 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
from github2.client import Github
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
logging.basicConfig(level=logging.ERROR)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
-def parse_gcode_issue_id(id_text):
+def parse_gcode_id(id_text):
return re.search('\d+$', id_text).group(0)
def add_issue_to_github(issue):
- id = parse_gcode_issue_id(issue.id.text)
+ id = parse_gcode_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
body = '%s\n\n\n_Original issue: %s (%s)_' % (content, link, date)
output('Adding issue %s' % (id))
github_issue = None
- if options.dry_run is False:
+ if not options.dry_run:
github_issue = github.issues.open(github_project, title=title, body=body.encode('utf-8'))
github.issues.add_label(github_project, github_issue.number, "imported")
if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
github.issues.close(github_project, github_issue.number)
# Add any labels
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
- if options.dry_run is False:
+ if not options.dry_run:
github.issues.add_label(github_project, github_issue.number, label.text.encode('utf-8'))
output('.')
return github_issue
def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
existing_comments = github.issues.comments(github_project, github_issue.number)
existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
- id = parse_gcode_issue_id(comment.id.text)
+ id = parse_gcode_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
- if options.dry_run is False:
+ if not options.dry_run:
github.issues.comment(github_project, github_issue.number, body.encode('utf-8'))
def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
- id = parse_gcode_issue_id(issue.id.text)
+ id = parse_gcode_id(issue.id.text)
if issue.title.text in existing_issues.keys():
github_issue = existing_issues[issue.title.text]
output('Not adding issue %s (exists)' % (id))
else:
github_issue = add_issue_to_github(issue)
add_comments_to_issue(github_issue, id)
start_index += max_results
def get_existing_github_issues():
try:
- existing_issues = github.issues.list_by_label(github_project, "imported")
+ existing_issues = github.issues.list_by_label(github_project, "imported") # only include github issues labeled "imported" in our duplicate checking
existing_issues = dict(zip([str(i.title) for i in existing_issues], existing_issues))
except:
existing_issues = {}
return existing_issues
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_api_token> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 4:
parser.print_help()
sys.exit()
google_project_name, github_api_token, github_user_name, github_project = args
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(username=github_user_name, api_token=github_api_token, requests_per_second=GITHUB_REQUESTS_PER_SECOND)
try:
existing_issues = get_existing_github_issues()
process_gcode_issues(existing_issues)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
a64af89bbf58efb8d4cd3bd2ca376fb4bf58242f
|
Prevent posting duplicate issues or comments on repeated runs.
|
diff --git a/migrate-issues.py b/migrate-issues.py
index be3c671..0892434 100644
--- a/migrate-issues.py
+++ b/migrate-issues.py
@@ -1,118 +1,144 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
from github2.client import Github
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
logging.basicConfig(level=logging.ERROR)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
+def parse_gcode_issue_id(id_text):
+ return re.search('\d+$', id_text).group(0)
+
+
def add_issue_to_github(issue):
- id = re.search('\d+$', issue.id.text).group(0)
+ id = parse_gcode_issue_id(issue.id.text)
title = issue.title.text
link = issue.link[1].href
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
body = '%s\n\n\n_Original issue: %s (%s)_' % (content, link, date)
output('Adding issue %s' % (id))
github_issue = None
- if options.dry_run is not True:
+ if options.dry_run is False:
github_issue = github.issues.open(github_project, title=title, body=body.encode('utf-8'))
github.issues.add_label(github_project, github_issue.number, "imported")
-
- if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
- if options.dry_run is not True:
+ if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
github.issues.close(github_project, github_issue.number)
# Add any labels
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
- if options.dry_run is not True:
+ if options.dry_run is False:
github.issues.add_label(github_project, github_issue.number, label.text.encode('utf-8'))
output('.')
+ return github_issue
+
+
+def add_comments_to_issue(github_issue, gcode_issue_id):
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
- comments_feed = google.get_comments(google_project_name, id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
- comments = filter(lambda e: e.content.text is not None, comments_feed.entry)
+ comments_feed = google.get_comments(google_project_name, gcode_issue_id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
+ comments = filter(lambda c: c.content.text is not None, comments_feed.entry) # exclude empty comments
+ existing_comments = github.issues.comments(github_project, github_issue.number)
+ existing_comments = filter(lambda c: c.body[0:5] == '_From', existing_comments) # only look at existing github comments that seem to have been imported
+ existing_comments = map(lambda c: re.sub(r'^_From.+_\n', '', c.body), existing_comments) # get the existing comments' bodies as they appear in gcode
+ comments = filter(lambda c: c.content.text not in existing_comments, comments) # exclude gcode comments that already exist in github
if len(comments) == 0:
break
if start_index == 1:
output(', adding comments')
for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
- id = re.search('\d+$', comment.id.text).group(0)
+ id = parse_gcode_issue_id(comment.id.text)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
- if options.dry_run is not True:
+ if options.dry_run is False:
github.issues.comment(github_project, github_issue.number, body.encode('utf-8'))
-def process_issues():
+def process_gcode_issues(existing_issues):
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
- add_issue_to_github(issue)
+ id = parse_gcode_issue_id(issue.id.text)
+ if issue.title.text in existing_issues.keys():
+ github_issue = existing_issues[issue.title.text]
+ output('Not adding issue %s (exists)' % (id))
+ else:
+ github_issue = add_issue_to_github(issue)
+ add_comments_to_issue(github_issue, id)
start_index += max_results
+def get_existing_github_issues():
+ try:
+ existing_issues = github.issues.list_by_label(github_project, "imported")
+ existing_issues = dict(zip([str(i.title) for i in existing_issues], existing_issues))
+ except:
+ existing_issues = {}
+ return existing_issues
+
+
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_api_token> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
- parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github")
+ parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github", default=False)
options, args = parser.parse_args()
if len(args) != 4:
parser.print_help()
sys.exit()
google_project_name, github_api_token, github_user_name, github_project = args
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(username=github_user_name, api_token=github_api_token, requests_per_second=GITHUB_REQUESTS_PER_SECOND)
try:
- process_issues()
+ existing_issues = get_existing_github_issues()
+ process_gcode_issues(existing_issues)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
b35a5fc0368c7cd79d95f131614ffc5ff5c78e60
|
Don't post empty comments. Comment header starts with 'From'.
|
diff --git a/migrate-issues.py b/migrate-issues.py
index b2ccce1..be3c671 100644
--- a/migrate-issues.py
+++ b/migrate-issues.py
@@ -1,117 +1,118 @@
#!/usr/bin/env python
import optparse
import sys
import re
import logging
import dateutil.parser
from github2.client import Github
import gdata.projecthosting.client
import gdata.projecthosting.data
import gdata.gauth
import gdata.client
import gdata.data
GITHUB_REQUESTS_PER_SECOND = 0.5
GOOGLE_MAX_RESULTS = 25
logging.basicConfig(level=logging.ERROR)
def output(string):
sys.stdout.write(string)
sys.stdout.flush()
def add_issue_to_github(issue):
id = re.search('\d+$', issue.id.text).group(0)
title = issue.title.text
link = issue.link[1].href
content = issue.content.text
date = dateutil.parser.parse(issue.published.text).strftime('%B %d, %Y %H:%M:%S')
body = '%s\n\n\n_Original issue: %s (%s)_' % (content, link, date)
output('Adding issue %s' % (id))
github_issue = None
if options.dry_run is not True:
github_issue = github.issues.open(github_project, title=title, body=body.encode('utf-8'))
github.issues.add_label(github_project, github_issue.number, "imported")
if issue.status.text.lower() in "invalid closed fixed wontfix verified done duplicate".lower():
if options.dry_run is not True:
github.issues.close(github_project, github_issue.number)
# Add any labels
if len(issue.label) > 0:
output(', adding labels')
for label in issue.label:
if options.dry_run is not True:
github.issues.add_label(github_project, github_issue.number, label.text.encode('utf-8'))
output('.')
# Add any comments
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
comments_feed = google.get_comments(google_project_name, id, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
- if len(comments_feed.entry) == 0:
+ comments = filter(lambda e: e.content.text is not None, comments_feed.entry)
+ if len(comments) == 0:
break
- if start_index is 1:
+ if start_index == 1:
output(', adding comments')
- for comment in comments_feed.entry:
+ for comment in comments:
add_comment_to_github(comment, github_issue)
output('.')
start_index += max_results
output('\n')
def add_comment_to_github(comment, github_issue):
id = re.search('\d+$', comment.id.text).group(0)
author = comment.author[0].name.text
date = dateutil.parser.parse(comment.published.text).strftime('%B %d, %Y %H:%M:%S')
content = comment.content.text
- body = '_%s on %s:_\n%s' % (author, date, content)
+ body = '_From %s on %s:_\n%s' % (author, date, content)
logging.info('Adding comment %s', id)
if options.dry_run is not True:
github.issues.comment(github_project, github_issue.number, body.encode('utf-8'))
def process_issues():
start_index = 1
max_results = GOOGLE_MAX_RESULTS
while True:
issues_feed = google.get_issues(google_project_name, query=gdata.projecthosting.client.Query(start_index=start_index, max_results=max_results))
if len(issues_feed.entry) == 0:
break
for issue in issues_feed.entry:
add_issue_to_github(issue)
start_index += max_results
if __name__ == "__main__":
usage = "usage: %prog [options] <google_project_name> <github_api_token> <github_user_name> <github_project>"
description = "Migrate all issues from a Google Code project to a Github project."
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-d', '--dry-run', action="store_true", dest="dry_run", help="Don't modify anything on Github")
options, args = parser.parse_args()
if len(args) != 4:
parser.print_help()
sys.exit()
google_project_name, github_api_token, github_user_name, github_project = args
google = gdata.projecthosting.client.ProjectHostingClient()
github = Github(username=github_user_name, api_token=github_api_token, requests_per_second=GITHUB_REQUESTS_PER_SECOND)
try:
process_issues()
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
d4440486e588aa482f618010e80126747e35df1d
|
Fixed unicode handling wrt BeautifulSupo4. Wrote nicer calls to BeautifulSoup4. Fix merge error in get_status(). Compose g_statusre in a nicer and PEP-8-friendly way.
|
diff --git a/migrateissues.py b/migrateissues.py
index 5f89aa8..f47663a 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,202 +1,201 @@
import csv
import logging
import datetime
import re
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
-g_statusre = \
- '^(' + \
- 'Issue has not had initial review yet' + '|' + \
- 'Problem reproduced \/ Need acknowledged' + '|' + \
- 'Work on this issue has begun' + '|' + \
- 'Waiting on feedback or additional information' + '|' + \
- 'Developer made source code changes, QA should verify' + '|' + \
- 'QA has verified that the fix worked' + '|' + \
- 'This was not a valid issue report' + '|' + \
- 'Unable to reproduce the issue' + '|' + \
- 'This report duplicates an existing issue' + '|' + \
- 'We decided to not take action on this issue' + '|' + \
- 'The requested non-coding task was completed' + \
- ')$'
+g_statusre = '^(' + '|'.join([
+ 'Issue has not had initial review yet',
+ 'Problem reproduced \/ Need acknowledged',
+ 'Work on this issue has begun',
+ 'Waiting on feedback or additional information',
+ 'Developer made source code changes, QA should verify',
+ 'QA has verified that the fix worked',
+ 'This was not a valid issue report',
+ 'Unable to reproduce the issue',
+ 'This report duplicates an existing issue',
+ 'We decided to not take action on this issue',
+ 'The requested non-coding task was completed']) + ')$'
+
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
- return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
+ return ("_From %s, %s_\n%s" % (self.author, self.created_at, self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
- self.get_original_data()
+ self.get_original_data()
def parse_date(self, node):
- datenode = node.find(attrs={'class' : 'date'})
- datestring = datenode['title']
+ date_string = node.find('span', 'date').attrs['title']
try:
- return datetime.datetime.strptime(datestring, '%a %b %d %H:%M:%S %Y')
+ return datetime.datetime.strptime(date_string, '%a %b %d %H:%M:%S %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now()
def get_user(self, node):
- authornode = node.find(attrs={'class' : 'author'})
- userhrefnode = authornode.find(attrs={'href' : re.compile('^\/u\/')})
+ authornode = node.find(attrs={'class': 'author'})
+ userhrefnode = authornode.find(attrs={'href': re.compile('^\/u\/')})
return userhrefnode.string
def get_body(self, node):
- comment = unicode(node.find('pre').renderContents(), 'utf-8', 'replace')
+ comment = node.pre.decode_contents().encode('utf-8')
return comment
def get_labels(self, soup):
self.labels = []
- self.milestones = [] # Milestones are a form of label in googlecode
- for node in soup.findAll(attrs = { 'class' : 'label' }):
- label = unicode(re.sub('<\/?b>', '', node.renderContents()))
+ self.milestones = [] # Milestones are a form of label in googlecode
+ for node in soup.find_all(attrs={'class': 'label'}):
+ label = re.sub('<\/?b>', '', node.decode_contents()).encode('utf-8')
if re.match('^Milestone-', label):
self.milestones.append(re.sub('^Milestone-', '', label))
else:
self.labels.append(label)
return
def get_status(self, soup):
- node = soup.find(name = 'span', attrs = { 'title' : re.compile(g_statusre) })
- return node.find('pre').string
+ node = soup.find(name='span', attrs={'title': re.compile(g_statusre)})
+ self.status = node.string
self.labels.append("Status-%s" % self.status)
return
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
- descriptionnode = soup.find(attrs={'class' : "cursor_off vt issuedescription"})
- descriptionstring = unicode(descriptionnode.find('pre').renderContents(), 'utf-8', 'replace')
- created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
+ descriptionnode = soup.find(attrs={'class': "cursor_off vt issuedescription"})
+ descriptionstring = descriptionnode.find('pre').decode_contents().encode('utf-8')
+ self.created_at = self.parse_date(soup.find('td', 'vt issuedescription'))
+
self.body = unicode("%s\n\n\n_Original issue: %s (%s)_" % (
- descriptionstring,
- self.original_url,
- self.created_at ))
+ descriptionstring,
+ self.original_url,
+ self.created_at))
comments = []
- for node in soup.findAll(attrs={'class' : "cursor_off vt issuecomment"}):
+ for node in soup.find_all(attrs={'class': "cursor_off vt issuecomment"}):
try:
- date = self.parse_date(node.find('span', 'date').string)
+ date = self.parse_date(node)
author = self.get_user(node)
body = self.get_body(node)
if not re.match('^\\n<i>\(No comment was entered for this change\.\)<\/i>\\n$', body):
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
self.get_labels(soup)
logging.info('got labels %s' % len(self.labels))
logging.info('got milestones %s' % len(self.milestones))
self.get_status(soup)
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified worksforme duplicate done".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
issues.sort(key=lambda i: int(i.id))
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Your Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
d7bfaa74eaeeb823485d2823b9ec44a5f312b80b
|
Removed unneeded int() conversion in logging.info line 41
|
diff --git a/migrateissues.py b/migrateissues.py
index 02cc763..f3dc5cf 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,160 +1,160 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
- logging.info("Issue #%s: %s" % (int(self.id), self.summary))
+ logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now()
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
self.body = "%s\n\n\n_Original issue: %s (%s)_" % (
soup.find('td', 'vt issuedescription').find('pre').text,
self.original_url,
created_at.strftime('%Y-%m-%d')
)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != '' and "(No comment was entered for this change.)" not in body:
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
issues.sort(key=lambda i: int(i.id))
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
fc9e94546b32c9a814f684529987f72df165dfd7
|
Fixed handling of missing/unparseable dates.
|
diff --git a/migrateissues.py b/migrateissues.py
index b83b348..02cc763 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,167 +1,160 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
- try:
- return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
- except:
- import pdb, sys
- e, m, tb = sys.exc_info()
- pdb.post_mortem(tb)
+ return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (int(self.id), self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
- return datetime.datetime.now
+ return datetime.datetime.now()
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
self.body = "%s\n\n\n_Original issue: %s (%s)_" % (
soup.find('td', 'vt issuedescription').find('pre').text,
self.original_url,
created_at.strftime('%Y-%m-%d')
)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != '' and "(No comment was entered for this change.)" not in body:
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
issues.sort(key=lambda i: int(i.id))
- import pdb
- pdb.set_trace()
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
e965c0ff3a2fafe088e65d6d422d38ad460303a5
|
Sort issues by id before posting
|
diff --git a/migrateissues.py b/migrateissues.py
index 1ce433f..b83b348 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,159 +1,167 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
- return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
+ try:
+ return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
+ except:
+ import pdb, sys
+ e, m, tb = sys.exc_info()
+ pdb.post_mortem(tb)
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
- logging.info("Issue #%s: %s" % (self.id, self.summary))
+ logging.info("Issue #%s: %s" % (int(self.id), self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
self.body = "%s\n\n\n_Original issue: %s (%s)_" % (
soup.find('td', 'vt issuedescription').find('pre').text,
self.original_url,
created_at.strftime('%Y-%m-%d')
)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != '' and "(No comment was entered for this change.)" not in body:
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
+ issues.sort(key=lambda i: int(i.id))
+ import pdb
+ pdb.set_trace()
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
b1b9d5dd71bf860faf2597912c5543f050000732
|
Removed indent before requirements-bullets which prevented linking from working.
|
diff --git a/README.mkd b/README.mkd
index 6f95a38..e94d268 100644
--- a/README.mkd
+++ b/README.mkd
@@ -1,23 +1,23 @@
This is a simple script to move issues from google code to github.
Some liberties have been taken (as we cannot, for example, know which google user corresponds to other user on github). But most information is complete.
Requirements:
- * [httplib2](http://code.google.com/p/httplib2/) -- pip install httplib2
- * [python github](http://github.com/ask/python-github2) -- pip install github2
- * [BeautifulSoup4](http://www.crummy.com/software/BeautifulSoup/bs4/download/4.0/) -- pip install BeautifulSoup4
+* [httplib2](http://code.google.com/p/httplib2/) -- `pip install httplib2`
+* [python github](http://github.com/ask/python-github2) -- `pip install github2`
+* [BeautifulSoup4](http://www.crummy.com/software/BeautifulSoup/bs4/download/4.0/) -- `pip install BeautifulSoup4`
Usage:
migrateissues.py [options]
Options:
-h, --help show this help message and exit
-g GOOGLE_PROJECT_NAME, --google-project-name=GOOGLE_PROJECT_NAME
The project name (from the URL) from google code.
-t GITHUB_API_TOKEN, --github-api-token=GITHUB_API_TOKEN
Yout Github api token
-u GITHUB_USER_NAME, --github-user-name=GITHUB_USER_NAME
The Github username
-p GITHUB_PROJECT, --github-project=GITHUB_PROJECT
The Github project name
|
arthur-debert/google-code-issues-migrator
|
ea336f98239c391e9acc54cc14af32c555770090
|
Corrected Markdown link syntax in readme (I think?)
|
diff --git a/README.mkd b/README.mkd
index 0ca5da7..6f95a38 100644
--- a/README.mkd
+++ b/README.mkd
@@ -1,23 +1,23 @@
This is a simple script to move issues from google code to github.
Some liberties have been taken (as we cannot, for example, know which google user corresponds to other user on github). But most information is complete.
Requirements:
- * (httplib2)[http://code.google.com/p/httplib2/] -- pip install httplib2
- * (python github)[http://github.com/ask/python-github2] -- pip install github2
- * (BeautifulSoup4)[http://www.crummy.com/software/BeautifulSoup/bs4/download/4.0/] -- pip install BeautifulSoup4
+ * [httplib2](http://code.google.com/p/httplib2/) -- pip install httplib2
+ * [python github](http://github.com/ask/python-github2) -- pip install github2
+ * [BeautifulSoup4](http://www.crummy.com/software/BeautifulSoup/bs4/download/4.0/) -- pip install BeautifulSoup4
Usage:
migrateissues.py [options]
Options:
-h, --help show this help message and exit
-g GOOGLE_PROJECT_NAME, --google-project-name=GOOGLE_PROJECT_NAME
The project name (from the URL) from google code.
-t GITHUB_API_TOKEN, --github-api-token=GITHUB_API_TOKEN
Yout Github api token
-u GITHUB_USER_NAME, --github-user-name=GITHUB_USER_NAME
The Github username
-p GITHUB_PROJECT, --github-project=GITHUB_PROJECT
The Github project name
|
arthur-debert/google-code-issues-migrator
|
57465945ac746fdc66f99d59f89d202a85758164
|
Added BeautifulSoup4 to list of lib requirements and appended pip install hints.
|
diff --git a/README.mkd b/README.mkd
index 4a033aa..0ca5da7 100644
--- a/README.mkd
+++ b/README.mkd
@@ -1,22 +1,23 @@
This is a simple script to move issues from google code to github.
Some liberties have been taken (as we cannot, for example, know which google user corresponds to other user on github). But most information is complete.
Requirements:
- * (httplib2)[http://code.google.com/p/httplib2/]
- * (python github)[http://github.com/ask/python-github2]
-
+ * (httplib2)[http://code.google.com/p/httplib2/] -- pip install httplib2
+ * (python github)[http://github.com/ask/python-github2] -- pip install github2
+ * (BeautifulSoup4)[http://www.crummy.com/software/BeautifulSoup/bs4/download/4.0/] -- pip install BeautifulSoup4
+
Usage:
migrateissues.py [options]
Options:
-h, --help show this help message and exit
-g GOOGLE_PROJECT_NAME, --google-project-name=GOOGLE_PROJECT_NAME
The project name (from the URL) from google code.
-t GITHUB_API_TOKEN, --github-api-token=GITHUB_API_TOKEN
Yout Github api token
-u GITHUB_USER_NAME, --github-user-name=GITHUB_USER_NAME
The Github username
-p GITHUB_PROJECT, --github-project=GITHUB_PROJECT
The Github project name
|
arthur-debert/google-code-issues-migrator
|
c89b883fff0a6716db1dffa1805c4b9c4f9a8ca4
|
Empty comments really won't get added now. Italicized 'Original issue:...' line.
|
diff --git a/migrateissues.py b/migrateissues.py
index 7641816..1ce433f 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,159 +1,159 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
- self.body = "%s\n\n\nOriginal issue: %s (%s)" % (
+ self.body = "%s\n\n\n_Original issue: %s (%s)_" % (
soup.find('td', 'vt issuedescription').find('pre').text,
self.original_url,
created_at.strftime('%Y-%m-%d')
)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
- if body != '' and body != "(No comment was entered for this change.)\n":
+ if body != '' and "(No comment was entered for this change.)" not in body:
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
b94bd11fd1dfdc98b83ad0029e2d3ff707f0b8ac
|
Fixed code that excludes empty comments. Reworded 'Original' to 'Original issue:'.
|
diff --git a/migrateissues.py b/migrateissues.py
index ed6ab1c..7641816 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,159 +1,159 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
- self.body = "%s\n\n\nOriginal: %s (%s)" % (
+ self.body = "%s\n\n\nOriginal issue: %s (%s)" % (
soup.find('td', 'vt issuedescription').find('pre').text,
self.original_url,
created_at.strftime('%Y-%m-%d')
)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
- if body != '' and body != "(No comment was entered for this change.)":
+ if body != '' and body != "(No comment was entered for this change.)\n":
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
9255365b3fc4eff82560431220826fd24e937164
|
Remove print debugs.
|
diff --git a/migrateissues.py b/migrateissues.py
index 4bb54d8..ed6ab1c 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,161 +1,159 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
- print soup.find('td', 'vt issuedescription').find('span', 'date').string
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
self.body = "%s\n\n\nOriginal: %s (%s)" % (
soup.find('td', 'vt issuedescription').find('pre').text,
self.original_url,
created_at.strftime('%Y-%m-%d')
)
- print self.body
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != '' and body != "(No comment was entered for this change.)":
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
    """Create (or find) the Github issue mirroring *issue*, then sync comments.

    The Google Code status is collapsed into Github's open/closed model. An
    issue whose numeric id already exists on Github is not re-created, and
    comments whose body is already present on the Github issue are skipped.

    Returns the github2 issue object.
    """
    logging.info('should post %s', issue)
    github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
    # Google Code has several distinct "done" states; Github only has open/closed.
    # Use exact word membership, not substring matching, so a status like
    # "fix" or "close" can never accidentally match "fixed"/"closed".
    if issue.status.lower() in "invalid closed fixed wontfix verified".split():
        issue.status = 'closed'
    else:
        issue.status = 'open'
    try:
        # github2 raises RuntimeError when the issue number does not exist yet.
        git_issue = github.issues.show(options.github_project, int(issue.id))
        logging.warn("skipping issue : %s" % (issue))
    except RuntimeError:
        title = issue.summary
        logging.info('will post issue:%s' % issue)
        logging.info("issue did not exist")
        git_issue = github.issues.open(options.github_project,
            title=title,
            body=issue.body
        )
    if issue.status == 'closed':
        github.issues.close(options.github_project, git_issue.number)
    if sync_comments is False:
        return git_issue
    old_comments = github.issues.comments(options.github_project, git_issue.number)
    for i, comment in enumerate(issue.comments):
        exists = False
        for old_c in old_comments:
            # A duplicate is an identical, non-empty body. (Issue status
            # changes on Google Code have empty bodies; never treat those as
            # matches.) The previous `bool(old_c.body) or ...` test marked a
            # comment as existing whenever ANY old comment was non-empty,
            # which silently stopped all further comments from being posted.
            if old_c.body and old_c.body == comment.body:
                exists = True
                logging.info("Found comment there, skipping")
                break
        if not exists:
            try:
                github.issues.comment(options.github_project, git_issue.number, comment)
            except Exception:
                # Best-effort: one failed comment must not abort the migration.
                logging.exception("Failed to post comment %s for issue %s" % (i, issue))
    return git_issue
def process_issues(issues_csv, sync_comments=True):
    """Parse the CSV issue export and mirror each issue to Github.

    All Issue objects are built first (each fetches its original page),
    then every one is posted — same two-phase order as before.
    """
    issues = []
    for row in csv.DictReader(issues_csv):
        issues.append(Issue(row))
    for issue in issues:
        post_to_github(issue, sync_comments)
if __name__ == "__main__":
    import optparse
    import sys

    # Command-line interface: all four options are plain string stores, so
    # declare them as data and register them in one loop.
    parser = optparse.OptionParser("usage: %prog [options]")
    for short_opt, long_opt, dest, help_text in (
            ('-g', '--google-project-name', "google_project_name",
             "The project name (from the URL) from google code."),
            ('-t', '--github-api-token', "github_api_token",
             "Yout Github api token"),
            ('-u', '--github-user-name', "github_user_name",
             "The Github username"),
            ('-p', '--github-project', "github_project",
             "The Github project name:: user-name/project-name")):
        parser.add_option(short_opt, long_opt, action="store", dest=dest, help=help_text)
    # `options` is read as a module-level global by the rest of the script.
    options, args = parser.parse_args(args=sys.argv, values=None)
    try:
        process_issues(download_issues())
    except:
        # Show usage on any failure, then let the traceback propagate.
        parser.print_help()
        raise
|
arthur-debert/google-code-issues-migrator
|
9b6cf76820db9d61198b64f47ca7de5da285e0b7
|
Improved date formatting to drop unavailable time portion which always printed as 00:00:00. Modified 'Original link' to just 'Original' and tacked on the original issue date.
|
diff --git a/migrateissues.py b/migrateissues.py
index c23ea32..4bb54d8 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,160 +1,161 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
- return ("_%s - %s_\n%s" % (self.author, self.created_at, self.body_raw)).encode('utf-8')
+ return ("_%s - %s_\n%s" % (self.author, self.created_at.strftime('%Y-%m-%d'), self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
+ print soup.find('td', 'vt issuedescription').find('span', 'date').string
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
- self.body = "%s\n\nOriginal date: %s\nOriginal link: %s" % (
+ self.body = "%s\n\n\nOriginal: %s (%s)" % (
soup.find('td', 'vt issuedescription').find('pre').text,
- created_at,
- self.original_url
+ self.original_url,
+ created_at.strftime('%Y-%m-%d')
)
print self.body
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != '' and body != "(No comment was entered for this change.)":
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
23b0f8cf73ea3c244aeb6c8b6a21cfbd37605ff9
|
Exclude empty comments.
|
diff --git a/migrateissues.py b/migrateissues.py
index e6527cc..c23ea32 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,160 +1,160 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at, self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
self.body = "%s\n\nOriginal date: %s\nOriginal link: %s" % (
soup.find('td', 'vt issuedescription').find('pre').text,
created_at,
self.original_url
)
print self.body
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
- if body != "(No comment was entered for this change.)":
+ if body != '' and body != "(No comment was entered for this change.)":
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
6b359743882cb305046a854a79ea968b38327525
|
Put create date into issue text since Github API won't let us give it a create date.
|
diff --git a/migrateissues.py b/migrateissues.py
index c351085..e6527cc 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,156 +1,160 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at, self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
- self.body = "%s\n\nOriginal link: %s" % (soup.find('td', 'vt issuedescription').find('pre').text, self.original_url)
- self.created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
+ created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
+ self.body = "%s\n\nOriginal date: %s\nOriginal link: %s" % (
+ soup.find('td', 'vt issuedescription').find('pre').text,
+ created_at,
+ self.original_url
+ )
+ print self.body
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != "(No comment was entered for this change.)":
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
- body=issue.body,
- created_at=issue.created_at
+ body=issue.body
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
d74ff51b37a80b367f65923b9d5d21e1683f9d1b
|
Another date fix.
|
diff --git a/migrateissues.py b/migrateissues.py
index 5e81be8..c351085 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,157 +1,156 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at, self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
self.body = "%s\n\nOriginal link: %s" % (soup.find('td', 'vt issuedescription').find('pre').text, self.original_url)
self.created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != "(No comment was entered for this change.)":
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
- if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
+ if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body,
- created_at=created_at
+ created_at=issue.created_at
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
- # issue status changes have empty bodies in google code , exclude those:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
99823f3627ec025e28ec4e50d7e953806a8bbccf
|
Further lower Github req/sec rate to 0.50 to try and avoid API rate limit error.
|
diff --git a/migrateissues.py b/migrateissues.py
index cad61e6..5e81be8 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,157 +1,157 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at, self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
def parse_date(self, date_string):
try:
return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
self.body = "%s\n\nOriginal link: %s" % (soup.find('td', 'vt issuedescription').find('pre').text, self.original_url)
self.created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
if body != "(No comment was entered for this change.)":
# only add comments that are actual comments.
comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
- github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.75)
+ github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.50)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
body=issue.body,
created_at=created_at
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
# issue status changes have empty bodies in google code , exclude those:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
arthur-debert/google-code-issues-migrator
|
512154b0b066157cd94401fc140d22607b939666
|
Don't post comments without actual written comment text. Cleanup of date code.
|
diff --git a/migrateissues.py b/migrateissues.py
index aa9d221..cad61e6 100644
--- a/migrateissues.py
+++ b/migrateissues.py
@@ -1,158 +1,157 @@
import csv
import logging
import datetime
from StringIO import StringIO
import httplib2
from github2.client import Github
from bs4 import BeautifulSoup
options = None
logging.basicConfig(level=logging.DEBUG)
def get_url_content(url):
h = httplib2.Http(".cache")
resp, content = h.request(url, "GET")
return content
class IssueComment(object):
def __init__(self, date, author, body):
self.created_at = date
self.body_raw = body
self.author = author
self.user = options.github_user_name
@property
def body(self):
return ("_%s - %s_\n%s" % (self.author, self.created_at, self.body_raw)).encode('utf-8')
def __repr__(self):
return self.body.encode('utf-8')
class Issue(object):
def __init__(self, issue_line):
for k, v in issue_line.items():
setattr(self, k.lower(), v)
logging.info("Issue #%s: %s" % (self.id, self.summary))
self.get_original_data()
- def parse_date(self, node):
- created_at_raw = node.find('span', 'date').string
+ def parse_date(self, date_string):
try:
- return datetime.datetime.strptime(created_at_raw, '%b %d, %Y')
+ return datetime.datetime.strptime(date_string, '%b %d, %Y')
except ValueError: # if can't parse time, just assume now
return datetime.datetime.now
def get_user(self, node):
return node.find_all('a')[1].string
def get_body(self, node):
return node.find('pre').text
def get_original_data(self):
logging.info("GET %s" % self.original_url)
content = get_url_content(self.original_url)
soup = BeautifulSoup(content)
self.body = "%s\n\nOriginal link: %s" % (soup.find('td', 'vt issuedescription').find('pre').text, self.original_url)
- created_at_raw = soup.find('td', 'vt issuedescription').find('span', 'date').string
- try:
- self.created_at = datetime.datetime.strptime(created_at_raw, '%b %d, %Y')
- except ValueError: # if can't parse time, just assume now
- self.created_at = datetime.datetime.now
+ self.created_at = self.parse_date(soup.find('td', 'vt issuedescription').find('span', 'date').string)
comments = []
for node in soup.find_all('div', "issuecomment"):
try:
- date = self.parse_date(node)
+ date = self.parse_date(node.find('span', 'date').string)
author = self.get_user(node)
body = self.get_body(node)
- comments.append(IssueComment(date, author, body))
+
+ if body != "(No comment was entered for this change.)":
+ # only add comments that are actual comments.
+ comments.append(IssueComment(date, author, body))
except:
pass
self.comments = comments
logging.info('got comments %s' % len(comments))
@property
def original_url(self):
gcode_base_url = "http://code.google.com/p/%s/" % options.google_project_name
return "%sissues/detail?id=%s" % (gcode_base_url, self.id)
def __repr__(self):
return u"%s - %s " % (self.id, self.summary)
def download_issues():
url = "http://code.google.com/p/" + options.google_project_name + "/issues/csv?can=1&q=&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary"
logging.info('Downloading %s' % url)
content = get_url_content(url)
f = StringIO(content)
return f
def post_to_github(issue, sync_comments=True):
logging.info('should post %s', issue)
github = Github(username=options.github_user_name, api_token=options.github_api_token, requests_per_second=0.75)
if issue.status.lower() in "invalid closed fixed wontfix verified".lower():
issue.status = 'closed'
else:
issue.status = 'open'
try:
git_issue = github.issues.show(options.github_project, int(issue.id))
logging.warn("skipping issue : %s" % (issue))
except RuntimeError:
title = issue.summary
logging.info('will post issue:%s' % issue)
logging.info("issue did not exist")
git_issue = github.issues.open(options.github_project,
title=title,
- body=issue.body
+ body=issue.body,
+ created_at=created_at
)
if issue.status == 'closed':
github.issues.close(options.github_project, git_issue.number)
if sync_comments is False:
return git_issue
old_comments = github.issues.comments(options.github_project, git_issue.number)
for i, comment in enumerate(issue.comments):
exists = False
for old_c in old_comments:
# issue status changes have empty bodies in google code , exclude those:
if bool(old_c.body) or old_c.body == comment.body:
exists = True
logging.info("Found comment there, skipping")
break
if not exists:
#logging.info('posting comment %s', comment.body.encode('utf-8'))
try:
github.issues.comment(options.github_project, git_issue.number, comment)
except:
logging.exception("Failed to post comment %s for issue %s" % (i, issue))
return git_issue
def process_issues(issues_csv, sync_comments=True):
reader = csv.DictReader(issues_csv)
issues = [Issue(issue_line) for issue_line in reader]
[post_to_github(i, sync_comments) for i in issues]
if __name__ == "__main__":
import optparse
import sys
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage)
parser.add_option('-g', '--google-project-name', action="store", dest="google_project_name", help="The project name (from the URL) from google code.")
parser.add_option('-t', '--github-api-token', action="store", dest="github_api_token", help="Yout Github api token")
parser.add_option('-u', '--github-user-name', action="store", dest="github_user_name", help="The Github username")
parser.add_option('-p', '--github-project', action="store", dest="github_project", help="The Github project name:: user-name/project-name")
options, args = parser.parse_args(args=sys.argv, values=None)
try:
issues_data = download_issues()
process_issues(issues_data)
except:
parser.print_help()
raise
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.