Skip to content

Commit

Permalink
Try to detect #207 as early as possible.
Browse files Browse the repository at this point in the history
  • Loading branch information
glandium committed Sep 24, 2021
1 parent 8292fde commit 3ecabe6
Show file tree
Hide file tree
Showing 2 changed files with 66 additions and 5 deletions.
31 changes: 31 additions & 0 deletions cinnabar/githg.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
except ImportError:
from urllib.parse import urlparse
from .exceptions import (
Abort,
AmbiguousGraftAbort,
NothingToGraftException,
OldUpgradeAbort,
Expand Down Expand Up @@ -1306,3 +1307,33 @@ def tagset_lines(tags):
interval_expired('fsck', 86400 * 7):
logging.warn('Have you run `git cinnabar fsck` recently?')
GitHgHelper.close(rollback=False)

# Try to detect issue #207 as early as possible.
GitHgHelper._helper = False
busted = False
from .hg.repo import getbundle_params, stored_files
for (node, (parent1, parent2)) in progress_iter(
"Checking {} imported file root and head revisions",
util.iteritems(stored_files)):
if not GitHgHelper.check_file(node, parent1, parent2):
busted = True
logging.error("Error in file %s" % node)
if busted:
import json
extra = ""
if getbundle_params:
extra = \
"If it failed, please also copy/paste the following:\n"
extra += json.dumps(getbundle_params, sort_keys=True, indent=4)
raise Abort(
"It seems you have hit a known, rare, and difficult to "
"reproduce issue.\n"
"Your help would be appreciated.\n"
"Please try either `git cinnabar rollback` followed by the "
"same command that just\n"
"failed, or `git cinnabar reclone`.\n"
"Please open a new issue "
"(https://github.com/glandium/git-cinnabar/issues/new)\n"
"mentioning issue #207 and reporting whether the second "
"attempt succeeded.\n" + extra
)
40 changes: 35 additions & 5 deletions cinnabar/hg/repo.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@
from collections import (
defaultdict,
deque,
OrderedDict,
)
from .bundle import (
create_bundle,
Expand Down Expand Up @@ -425,6 +426,9 @@ def log_dag(tag):
return [store.hg_changeset(h) for h in dag.heads('known')]


# Parameters (heads, common, bundlecaps) of the last getbundle request,
# recorded by HelperRepo.getbundle so they can be reported alongside
# failures when the issue #207 detection in githg.py trips.
getbundle_params = {}

class HelperRepo(object):
__slots__ = "_url", "_branchmap", "_heads", "_bookmarks", "_ui", "remote"

Expand Down Expand Up @@ -503,9 +507,13 @@ def known(self, nodes):
return [b == b'1'[0] for b in result]

def getbundle(self, name, heads, common, *args, **kwargs):
data = HgRepoHelper.getbundle((hexlify(h) for h in heads),
(hexlify(c) for c in common),
b','.join(kwargs.get('bundlecaps', ())))
heads = [hexlify(h) for h in heads]
common = [hexlify(c) for c in common]
bundlecaps = b','.join(kwargs.get('bundlecaps', ()))
getbundle_params["heads"] = heads
getbundle_params["common"] = common
getbundle_params["bundlecaps"] = bundlecaps
data = HgRepoHelper.getbundle(heads, common, bundlecaps)
header = readexactly(data, 4)
if header == b'HG20':
return unbundle20(self.ui, data)
Expand Down Expand Up @@ -811,6 +819,9 @@ def iter_files(iter):
assert False


stored_files = OrderedDict()


class BundleApplier(object):
def __init__(self, bundle):
self._bundle = store_changegroup(bundle)
Expand All @@ -824,13 +835,32 @@ def __call__(self, store):
next(self._bundle, None)):
pass

def enumerate_files(iterator):
    # Wrap the changegroup file-chunk iterator, yielding
    # ((chunk_count, file_count), chunk) pairs for progress reporting,
    # while recording revisions of interest into stored_files.
    # The parameter is named "iterator" rather than "iter" so the
    # builtin iter() used below is not shadowed.
    null_parents = (NULL_NODE_ID, NULL_NODE_ID)
    last_name = None
    count_names = 0
    for count_chunks, (name, chunk) in enumerate(iterator, start=1):
        if name != last_name:
            count_names += 1
        last_name = name
        parents = (chunk.parent1, chunk.parent2)
        # Try to detect issue #207 as early as possible.
        # Keep track of file roots of files with metadata and at least
        # one head that can be traced back to each of those roots.
        # Or, in the case of updates, all heads.
        if store._has_metadata or chunk.parent1 in stored_files or \
                chunk.parent2 in stored_files:
            stored_files[chunk.node] = parents
            # A parent that was itself stored as a non-root (non-null
            # parents) is no longer a head once this child arrives;
            # drop it so only roots and current heads remain.
            for p in parents:
                if p == NULL_NODE_ID:
                    continue
                if stored_files.get(p, null_parents) != null_parents:
                    del stored_files[p]
        elif parents == null_parents:
            # Root revision: only record it when its first patch hunk
            # starts at offset 0 with b'\1\n' — presumably the marker
            # for hg file metadata (TODO confirm against hg filelog
            # format).
            diff = next(iter(chunk.patch), None)
            if diff and diff.start == 0 and \
                    diff.text_data[:2] == b'\1\n':
                stored_files[chunk.node] = parents
        yield (count_chunks, count_names), chunk

for rev_chunk in progress_enum(
Expand Down

0 comments on commit 3ecabe6

Please sign in to comment.