← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~ilasc/turnip:make-repack-async into turnip:master

 

Ioana Lasc has proposed merging ~ilasc/turnip:make-repack-async into turnip:master.

Commit message:
Make the repack endpoint asynchronous

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~ilasc/turnip/+git/turnip/+merge/394581

This branch needs more work:

- Not sure whether all we want in this endpoint is `git repack -Ad`, or whether we want the other options back in?
- you will notice a comment in the unit test "test_repack" inside test_store:

        #store.repack(repo_path)
it looked like the Celery task was not being triggered, and I wanted to verify why.

- Needs more logging.
- Possibly a different return value on error from the API?


-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~ilasc/turnip:make-repack-async into turnip:master.
diff --git a/turnip/api/store.py b/turnip/api/store.py
index 01f4d69..c130a27 100644
--- a/turnip/api/store.py
+++ b/turnip/api/store.py
@@ -471,33 +471,18 @@ def delete_repo(repo_path):
         repo_path = repo_path.encode('UTF-8')
     shutil.rmtree(repo_path)
 
+@app.task
+def repack(repo_path):
+    """Repack a repository with git-repack."""
+    logger = tasks_logger
 
-def repack(repo_path, ignore_alternates=False, single=False,
-           prune=False, no_reuse_delta=False, window=None, depth=None):
-    """Repack a repository with git-repack.
+    logger.info(
+        "Asynchronous repack triggered for repository: "
+            "%s", repo_path)
 
-    :param ignore_alternates: Only repack local refs (git repack --local).
-    :param single: Create a single packfile (git repack -a).
-    :param prune: Remove redundant packs. (git repack -d)
-    :param no_reuse_delta: Force delta recalculation.
-    """
-    ensure_config(repo_path)
-
-    repack_args = ['git', 'repack', '-q']
-    if ignore_alternates:
-        repack_args.append('-l')
-    if no_reuse_delta:
-        repack_args.append('-f')
-    if prune:
-        repack_args.append('-d')
-    if single:
-        repack_args.append('-a')
-    if window:
-        repack_args.append('--window', window)
-    if depth:
-        repack_args.append('--depth', depth)
-
-    return subprocess.check_call(
+    repack_args = ['git', 'repack', '-Ad']
+
+    subprocess.check_call(
         repack_args, cwd=repo_path,
         stderr=subprocess.PIPE, stdout=subprocess.PIPE)
 
diff --git a/turnip/api/tests/test_api.py b/turnip/api/tests/test_api.py
index 75a7c80..3afc961 100644
--- a/turnip/api/tests/test_api.py
+++ b/turnip/api/tests/test_api.py
@@ -833,37 +833,11 @@ class ApiTestCase(TestCase, ApiRepoStoreMixin):
         self.assertEqual(5, len(resp.json))
         self.assertNotIn(excluded_commit, resp.json)
 
-    def test_repo_repack_verify_pack(self):
+    def test_A_repo_repack_verify_pack(self):
         """Ensure commit exists in pack."""
         factory = RepoFactory(self.repo_store)
-        oid = factory.add_commit('foo', 'foobar.txt')
-        resp = self.app.post_json('/repo/{}/repack'.format(self.repo_path),
-                                  {'prune': True, 'single': True})
-        for filename in factory.packs:
-            pack = os.path.join(factory.pack_dir, filename)
-        out = subprocess.check_output(['git', 'verify-pack', pack, '-v'])
+        resp = self.app.post_json('/repo/{}/repack'.format(self.repo_path))
         self.assertEqual(200, resp.status_code)
-        self.assertIn(oid.hex.encode('ascii'), out)
-
-    def test_repo_repack_verify_commits_to_pack(self):
-        """Ensure commits in different packs exist in merged pack."""
-        factory = RepoFactory(self.repo_store)
-        oid = factory.add_commit('foo', 'foobar.txt')
-        with chdir(factory.pack_dir):
-            subprocess.call(['git', 'gc', '-q'])  # pack first commit
-            oid2 = factory.add_commit('bar', 'foobar.txt', [oid])
-            p = subprocess.Popen(['git', 'pack-objects', '-q', 'pack2'],
-                                 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-            p.communicate(input=oid2.hex.encode('ascii'))
-        self.assertEqual(2, len(factory.packs))  # ensure 2 packs exist
-        self.app.post_json('/repo/{}/repack'.format(self.repo_path),
-                           {'prune': True, 'single': True})
-        self.assertEqual(1, len(factory.packs))
-        repacked_pack = os.path.join(factory.pack_dir, factory.packs[0])
-        out = subprocess.check_output(['git', 'verify-pack',
-                                       repacked_pack, '-v'])
-        self.assertIn(oid.hex.encode('ascii'), out)
-        self.assertIn(oid2.hex.encode('ascii'), out)
 
     def test_repo_detect_merges_missing_target(self):
         """A non-existent target OID returns HTTP 404."""
diff --git a/turnip/api/tests/test_store.py b/turnip/api/tests/test_store.py
index 5108581..f839785 100644
--- a/turnip/api/tests/test_store.py
+++ b/turnip/api/tests/test_store.py
@@ -446,3 +446,45 @@ class InitTestCase(TestCase):
         self.assertEqual(before_refs_len - 1, len(orig.references.objects))
         self.assertNotIn(
             new_branch_name, [i.name for i in orig.references.objects])
+
+    def getLooseObjects(self, path):
+        curdir = os.getcwd()
+
+        os.chdir(path)
+        objects = subprocess.check_output(['git', 'count-objects'])
+        if (int(objects[0:(objects.find(' objects'))]) == 0):
+            os.chdir(curdir)
+            return True
+        else:
+            os.chdir(curdir)
+            return False
+
+    def test_repack(self):
+        celery_fixture = CeleryWorkerFixture()
+        self.useFixture(celery_fixture)
+
+        curdir = os.getcwd()
+        gc_path = os.path.join(self.repo_store, 'test_gc/')
+        gc_factory = RepoFactory(
+            gc_path, num_branches=3, num_commits=30, num_tags=3)
+        os.mkdir(os.path.join(gc_path, 'worktree'))
+        gc_factory.build()
+        os.chdir(os.path.join(gc_path, 'worktree'))
+        repo_config = pygit2.Repository(gc_path).config
+        repo_config['core.bare'] = False
+        repo_config['core.worktree'] = os.path.join(gc_path, 'worktree')
+        assert 'core.worktree' in repo_config
+        assert repo_config['core.worktree']
+
+        repo_path = os.path.join(gc_path, 'worktree')
+        objects = subprocess.check_output(['git', 'count-objects'])
+        self.assertGreater(int(objects[0:(objects.find(' objects'))]), 0)
+        kwargs = dict(repo_path=repo_path)
+
+        #store.repack(repo_path)
+        store.repack.apply_async((repo_path, ))
+
+        celery_fixture.waitUntil(
+            2, lambda: self.getLooseObjects(repo_path))
+
+        os.chdir(curdir)
diff --git a/turnip/api/views.py b/turnip/api/views.py
index 049f3ee..b9d257f 100644
--- a/turnip/api/views.py
+++ b/turnip/api/views.py
@@ -145,23 +145,17 @@ class RepackAPI(BaseAPI):
     def post(self, repo_store, repo_name):
         repo_path = os.path.join(repo_store, repo_name)
 
-        data = extract_json_data(self.request)
-        ignore_alternates = data.get('ignore_alternates')
-        no_reuse_delta = data.get('no_reuse_delta')
-        prune = data.get('prune')
-        single = data.get('single')
-        window = data.get('window')
-        depth = data.get('depth')
-
         try:
-            store.repack(repo_path, single=single, prune=prune,
-                         no_reuse_delta=no_reuse_delta,
-                         ignore_alternates=ignore_alternates,
-                         window=window, depth=depth)
+            kwargs = dict(repo_path=repo_path)
+            store.repack.apply_async(kwargs=kwargs)
         except (CalledProcessError):
-            return exc.HTTPInternalServerError()
-        return
+            self.request.errors.add(
+                'The server has either erred or is incapable of performing '
+                'the requested operation.')
+            self.request.errors.status = 500
+            return
 
+        return Response(status=200)
 
 @resource(path='/repo/{name}/refs-copy')
 class RefCopyAPI(BaseAPI):

Follow ups