This is an experimental copy for testing Poikilos' issue mirroring system. Note that Gitea's migration tool can import issues, but the "Issues" checkbox is disabled when "This repository will be a mirror" is enabled (it is for this repo).

#!/usr/bin/env python3
'''
Title: pyissuesyncd
(c) 2021 Jake "Poikilos" Gustafson
Purpose:
This python-based daemon synchronizes issues (one-way) from one
repository to another.
License:
See the license file in the included EnlivenMinetest directory or at
[EnlivenMinetest](https://github.com/poikilos/EnlivenMinetest)
Outputs:
data_directory: The data directory for this service daemon is
os.path.join(profile, ".cache", "pyissuesyncd").
required arguments:
--dst-repo (or set the DST_REPO environment variable)
Issues and dependent data will be overwritten at this API URL.
optional arguments:
Environment variables can be set, but a CLI argument will override the
corresponding variable noted below in all caps.
The two _CACHE directories below are used as the single_cache option
for the Repo (see enissue.py's Repo class for documentation).
--src-cache: Set the directory to store a cached version of the source repo's data.
* defaults to SRC_CACHE or os.path.join(data_directory, "source")
--dst-cache: Set the directory to store a cached version of the destination repo's data.
* defaults to DST_CACHE or os.path.join(data_directory, "destination")
Examples:
DST_REPO=https://example.com/git/repo pyissuesyncd
pyissuesyncd --dst-repo https://example.com/git/repo
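A third example (hypothetical URLs and cap) overrides the source repository and
the issue-number cap on the command line:
pyissuesyncd --src-repo https://github.com/poikilos/EnlivenMinetest --dst-repo https://example.com/git/repo --src-max-issue 100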
'''
import os
import sys
import json
from datetime import datetime  #, timedelta


# see <https://stackoverflow.com/questions/5574702/how-to-print-to-stderr-in-python>
def error(*args, **kwargs):
    print(*args, file=sys.stderr, **kwargs)


myFilePath = os.path.realpath(__file__)
me = os.path.basename(__file__)
myDir = os.path.dirname(myFilePath)
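# Locate enissue.py, which is expected either alongside this script or in a
# directory listed in PATH: try a normal import first, then search PATH.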
try:
    import enissue
except ModuleNotFoundError as ex:
    PATH = os.environ.get("PATH")
    found_d = None
    if PATH is not None:
        more_paths = PATH.split(os.pathsep)
        # ^ pathsep such as ':' (NOT dirsep such as '/'!)
        for this_d in more_paths:
            tryF = os.path.join(this_d, "enissue.py")
            if os.path.isfile(tryF):
                found_d = this_d
                break
    if found_d is not None:
        sys.path.append(found_d)
        print("* detected enissue.py in {}".format(found_d))
        # print("path: {}".format(sys.path))
        try:
            import enissue
        except ModuleNotFoundError as ex2:
            error(ex2)
            print("{} must be in the same directory as enissue.py or in"
                  " PATH".format(me))
            sys.exit(1)
    else:
        print("{} must be in the same directory as enissue.py or in"
              " PATH".format(me))
        raise ex
from enissue import Repo
data_directory = os.path.join(Repo.profile, ".cache", "pyissuesyncd")
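# get_issue wraps Repo.load_issues for a single issue number. It returns an
# (issue, error) pair: (issue_dict, None) on success, or (None, err) where
# err is a dict that includes 'reason' and usually 'code' (an HTTP status).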
def get_issue(repo, options, issue_no):
    results, err = repo.load_issues(
        options,
        issue_no=issue_no,
    )
    if results is None:
        if err is not None:
            if err.get('code') == 410:
                # error("The issue was deleted")
                pass
            elif err.get('code') == 404:
                # error("The issue doesn't exist")
                pass
            return None, err
        else:
            msg = ("Unknown error: Results should not be None unless"
                   " there is an error (issue_no={})."
                   "".format(issue_no))
            return (
                None,
                {
                    'reason': msg,
                }
            )
    elif not isinstance(results, list):
        raise RuntimeError("Results must be a list even if there is"
                           " only one result.")
    elif len(results) > 1:
        raise RuntimeError("Results should have"
                           " only one result.")
    issue = results[0]
    '''
    match = repo.get_match(
        mode,
        issue_no=issue_no,
        match_all_labels_lower=match_all_labels_lower,
    )
    matching_issue = match['issue']
    if matching_issue is not None:
        repo.show_issue(
            matching_issue,
            refresh=False,
            never_expire=options.get('never_expire') is True,
        )
    '''
    return issue, None
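# start_issuesyncd walks source issue numbers 1..max_issue: for each number
# it loads the issue from the source repo, then loads the matching issue
# from the destination repo and computes both 'updated_at' timestamps so
# they can be compared.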
def start_issuesyncd(src_options, dst_options):
    # src_never_expire = src_options.get('never_expire') is True
    max_issue = src_options.get('max_issue')
    if max_issue is None:
        max_issue = 1000
        error("WARNING: SRC_MAX_ISSUE set to default: {}"
              "".format(max_issue))
    else:
        max_issue = int(max_issue)
    issue_no = 0  # This is incremented to 1 before use.
    # issue_no = max_issue - 1  # debug only
    src_res_code = 0
    end_codes = [404, 403]
    # while src_res_code not in end_codes:
    while True:
        # while (issue_no + 1) <= max_issue:  # for debug only
        issue_no += 1
        if max_issue is not None:
            if issue_no > max_issue:
                error("* ending due to setting: --src-max-issue={}"
                      " (can also be set by SRC_MAX_ISSUE env var)"
                      "".format(max_issue))
                break
        src_repo = Repo(src_options)
        src_issue, err = get_issue(src_repo, src_options, issue_no)
        deleted = False
        if err is not None:
            '''
            error("Error accessing source issue {}: {}: {}"
                  "".format(issue_no, err.get('code'),
                            err.get('reason')))
            '''
            src_res_code = err.get('code')
            url = err.get('url')
            if src_res_code in end_codes:
                if src_res_code == 403:
                    error("#{}: stopping due to error {} ({})"
                          "".format(issue_no, err.get('code'),
                                    err.get('reason')))
                    # error(" * reason: {}".format())
                    # error(" * headers: {}".format(err.get('headers')))
                    break
                elif src_res_code == 404:
                    error("#{}: Error 404: There is no {}"
                          " so the end of the issues may have been"
                          " reached.".format(issue_no, url))
                    error(" * reason: {}".format(err.get('reason')))
                    # error(" * headers: {}".format(err.get('headers')))
                    continue
                elif src_res_code == 410:
                    error("#{}: The issue seems to have been deleted."
                          "".format(issue_no))
                    error(" * reason: {}".format(err.get('reason')))
                    # error(" * headers: {}".format(err.get('headers')))
                    deleted = True
                    # TODO: delete on dest (carefully!)
                    continue
                else:
                    error("#{}: stopping due to error code {}"
                          "".format(issue_no, src_res_code))
                    break
            else:
                error("#{}: continuing anyway but got error code {}"
                      "".format(issue_no, src_res_code))
            if src_issue is None:
                if src_res_code not in end_codes:
                    error("#{}: Skipping due to unprocessed error {}"
                          "".format(issue_no, src_res_code))
                else:
                    error("#{}: Stopping due to unprocessed error {}"
                          "".format(issue_no, src_res_code))
                continue
            else:
                error("However, an issue was returned.")
        error("Got issue {}".format(issue_no))
        # Example: ~/.cache/pyissuesyncd/source/issues/1.json
        src_dt_parser = src_repo.options['default_dt_parser']
        src_created_dt_s = src_repo.getKnown(src_issue, 'created_at')
        src_updated_dt_s = src_repo.getKnown(src_issue, 'updated_at')
        src_updated_dt = src_dt_parser(src_updated_dt_s)
        src_updated_ts = int(src_updated_dt.strftime("%s"))
        # ^ See <https://stackoverflow.com/questions/19801727/convert-
        #   datetime-to-unix-timestamp-and-convert-it-back-in-python>
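        # NOTE: strftime("%s") relies on a platform-specific C library
        # extension (it works with glibc but is not portable, e.g. not on
        # Windows); src_updated_dt.timestamp() would be a portable
        # alternative if this ever needs to run elsewhere.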
        '''
        print("* src_issue: {} updated: {} = {}"
              "".format(issue_no, src_updated_ts, src_updated_dt))
        '''
        # print(json.dumps(src_issue, indent=2))
        # enissue.set_verbose(True)
        dst_repo = Repo(dst_options)
        dst_issue, err = get_issue(dst_repo, dst_options, issue_no)
        if err is not None:
            dst_res_code = err.get('code')
            url = err.get('url')
            '''
            if dst_res_code in end_codes:
                if dst_res_code == 403:
                    error("* stopping due to: {}"
                          "".format(err.get('reason')))
                    break
                elif dst_res_code == 404:
                    error("* 404: There is no issue {} at {} so the end"
                          " of the issues may have been reached."
                          "".format(issue_no, url))
                    error(" * reason: {}".format(err.get('reason')))
                    # error(" * headers: {}".format(err.get('headers')))
                    continue
                elif dst_res_code == 410:
                    error(err.get('reason'))
                    error("* Issue {} seems to have been deleted."
                          "".format(issue_no))
                    continue
                break
            '''
            if dst_issue is None:
                # TODO: write the issue
                continue
        if dst_issue is None:
            raise RuntimeError("dst_issue shouldn't be None when error"
                               " is None.")
        dst_dt_parser = dst_repo.options['default_dt_parser']
        dst_created_dt_s = dst_repo.getKnown(dst_issue, 'created_at')
        dst_updated_dt_s = dst_repo.getKnown(dst_issue, 'updated_at')
        dst_updated_dt = dst_dt_parser(dst_updated_dt_s)
        dst_updated_ts = int(dst_updated_dt.strftime("%s"))
        # ^ See <https://stackoverflow.com/questions/19801727/convert-
        #   datetime-to-unix-timestamp-and-convert-it-back-in-python>
        '''
        print("* dst_issue: {} updated: {} = {}"
              "".format(issue_no, dst_updated_ts, dst_updated_dt))
        '''
        # Example: ~/.cache/pyissuesyncd/destination/issues/1.json
        # break  # for debug only
        continue  # for debug only
        # print(" * dst_issue:")
        # print(json.dumps(dst_issue, indent=2))
        if err is not None:
            if err.get('code') == 404:
                # dst_repo.create_issue(src_issue, src_repo)
                continue
            error("Error accessing destination issue {}: {}: {}"
                  "".format(issue_no, err.get('code'),
                            err.get('reason')))
            continue
        # if issue_differs:  # compare timestamp
        if True:  # for debug only
            pass
            # dst_repo.update_issue(src_issue, src_repo)
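# NOTE: Actually creating or updating destination issues is not implemented
# yet; the dst_repo.create_issue and dst_repo.update_issue calls above are
# placeholders left commented out (see the TODO markers in start_issuesyncd).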
def usage():
    print(__doc__)
if __name__ == "__main__":
src_options = {
'repo_url': "https://github.com/poikilos/EnlivenMinetest",
'never_expire': True,
'quiet': True,
'api_id': "GitHub",
}
dst_options = {
'never_expire': True,
'quiet': True,
'api_id': "Gitea",
}
DST_REPO = os.environ.get('DST_REPO')
if DST_REPO is not None:
dst_options['repo_url'] = DST_REPO
del DST_REPO
SRC_REPO = os.environ.get('SRC_REPO')
if DST_REPO is not None:
src_options['repo_url'] = SRC_REPO
del SRC_REPO
    SRC_CACHE = os.environ.get('SRC_CACHE')
    if SRC_CACHE is None:
        SRC_CACHE = os.path.join(data_directory, "source")
    DST_CACHE = os.environ.get('DST_CACHE')
    if DST_CACHE is None:
        DST_CACHE = os.path.join(data_directory, "destination")
    SRC_MAX_ISSUE = os.environ.get('SRC_MAX_ISSUE')
    prev_arg = None
    manual_args = ['--dst-repo', '--src-repo', '--src-cache',
                   '--dst-cache', '--src-max-issue']
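    # Each argument in manual_args takes a value in the following position;
    # a value given on the command line overrides the corresponding
    # environment variable (as documented in the module docstring).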
    for arg in sys.argv[1:]:
        if prev_arg == "--dst-repo":
            dst_options['repo_url'] = arg
        elif prev_arg == "--src-repo":
            src_options['repo_url'] = arg
        elif prev_arg == "--src-cache":
            SRC_CACHE = arg
        elif prev_arg == "--dst-cache":
            DST_CACHE = arg
        elif prev_arg == "--src-max-issue":
            SRC_MAX_ISSUE = int(arg)
        elif arg in manual_args:
            pass
        else:
            usage()
            error("Error: The argument is not valid: {}".format(arg))
            sys.exit(1)
        prev_arg = arg
    src_options['single_cache'] = SRC_CACHE
    src_options['max_issue'] = SRC_MAX_ISSUE
    # ^ INFO: start_issuesyncd warns if SRC_MAX_ISSUE is None.
    dst_options['single_cache'] = DST_CACHE
    error("SRC_REPO (--src-repo) is {}"
          "".format(src_options.get('repo_url')))
    error("DST_REPO (--dst-repo) is {}"
          "".format(dst_options.get('repo_url')))
    if dst_options.get('repo_url') is None:
        error("Error: You must set DST_REPO in the environment or"
              " specify a URL after --dst-repo")
        sys.exit(1)
    start_issuesyncd(src_options, dst_options)