Kiln » Dependencies » Dulwich Read More
Clone URL:  
Pushed to one repository · View In Graph Contained in master

Previously, the word "type" was massively overloaded in objects.py. It
could refer to the numeric type of an object (obj.type or

obj._num_type), the type name of the object (obj._type or FOO_ID), or
the actual class (Python type) of the object. This could get quite
confusing.

This change does the following:
-Replace obj._type and obj._num_type with type_name and type_num. (The
type property is retained for client compatibility, but is marked as
deprecated.) Change the various type maps and callers to use the
object's public members as keys.
-Add a convenience function object_class that takes either a string or
an int and dispatches to the appropriate type map.
-Rename the FOO_ID constants as _FOO_HEADER, since those constants
were previously overloaded to mean both header field names and type
names. There is some overlap, but this is intentional.
-Use isinstance for type comparisons rather than type, which is common
Python practice and avoids the problematic word altogether.

Changeset 684b4ba58ae9

Parent f58dc961489a

committed by Jelmer Vernooij

authored by Dave Borowitz

Changes to 8 files · Browse files at 684b4ba58ae9 Showing diff from parent f58dc961489a Diff from another changeset...

Change 1 of 1 Show Entire File dulwich/​errors.py Stacked
 
37
38
39
40
 
41
42
43
 
 
44
45
46
 
47
48
49
 
50
51
52
53
54
55
 
 
56
57
58
59
60
61
 
 
62
63
64
65
66
67
 
68
69
70
71
72
73
 
 
74
75
76
 
37
38
39
 
40
41
 
 
42
43
44
45
 
46
47
 
 
48
49
50
51
52
 
 
53
54
55
56
57
58
 
 
59
60
61
62
63
64
65
 
66
67
68
69
70
 
 
71
72
73
74
75
@@ -37,40 +37,39 @@
   class WrongObjectException(Exception):   """Baseclass for all the _ is not a _ exceptions on objects. - +   Do not instantiate directly. - - Subclasses should define a _type attribute that indicates what + + Subclasses should define a type_name attribute that indicates what   was expected if they were raised.   """ - +   def __init__(self, sha, *args, **kwargs): - string = "%s is not a %s" % (sha, self._type) - Exception.__init__(self, string) + Exception.__init__(self, "%s is not a %s" % (sha, self.type_name))      class NotCommitError(WrongObjectException):   """Indicates that the sha requested does not point to a commit.""" - - _type = 'commit' + + type_name = 'commit'      class NotTreeError(WrongObjectException):   """Indicates that the sha requested does not point to a tree.""" - - _type = 'tree' + + type_name = 'tree'      class NotTagError(WrongObjectException):   """Indicates that the sha requested does not point to a tag."""   - _type = 'tag' + type_name = 'tag'      class NotBlobError(WrongObjectException):   """Indicates that the sha requested does not point to a blob.""" - - _type = 'blob' + + type_name = 'blob'      class MissingCommitError(Exception):
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
 
96
97
98
99
100
101
102
 
 
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
 
288
289
 
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
 
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
 
507
508
 
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
 
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
 
95
96
97
98
99
100
 
 
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
 
287
288
 
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
 
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
 
506
507
 
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
 
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
 # object_store.py -- Object store for git objects  # Copyright (C) 2008-2009 Jelmer Vernooij <jelmer@samba.org>  #  # This program is free software; you can redistribute it and/or  # modify it under the terms of the GNU General Public License  # as published by the Free Software Foundation; either version 2  # or (at your option) a later version of the License.  #  # This program is distributed in the hope that it will be useful,  # but WITHOUT ANY WARRANTY; without even the implied warranty of  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the  # GNU General Public License for more details.  #  # You should have received a copy of the GNU General Public License  # along with this program; if not, write to the Free Software  # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,  # MA 02110-1301, USA.      """Git object store interfaces and implementation."""      import errno  import itertools  import os  import posixpath  import stat  import tempfile  import urllib2    from dulwich.errors import (   NotTreeError,   )  from dulwich.file import GitFile  from dulwich.objects import (   Commit,   ShaFile,   Tag,   Tree,   hex_to_sha,   sha_to_hex,   S_ISGITLINK,   )  from dulwich.pack import (   Pack,   PackData,   iter_sha1,   load_pack_index,   write_pack,   write_pack_data,   write_pack_index_v2,   )    PACKDIR = 'pack'      class BaseObjectStore(object):   """Object store interface."""     def determine_wants_all(self, refs):   return [sha for (ref, sha) in refs.iteritems() if not sha in self and not ref.endswith("^{}")]     def iter_shas(self, shas):   """Iterate over the objects for the specified shas.     
:param shas: Iterable object with SHAs   :return: Object iterator   """   return ObjectStoreIterator(self, shas)     def contains_loose(self, sha):   """Check if a particular object is present by SHA1 and is loose."""   raise NotImplementedError(self.contains_loose)     def contains_packed(self, sha):   """Check if a particular object is present by SHA1 and is packed."""   raise NotImplementedError(self.contains_packed)     def __contains__(self, sha):   """Check if a particular object is present by SHA1.     This method makes no distinction between loose and packed objects.   """   return self.contains_packed(sha) or self.contains_loose(sha)     @property   def packs(self):   """Iterable of pack objects."""   raise NotImplementedError     def get_raw(self, name):   """Obtain the raw text for an object.     :param name: sha for the object. - :return: tuple with object type and object contents. + :return: tuple with numeric type and object contents.   """   raise NotImplementedError(self.get_raw)     def __getitem__(self, sha):   """Obtain an object by SHA1.""" - type, uncomp = self.get_raw(sha) - return ShaFile.from_raw_string(type, uncomp) + type_num, uncomp = self.get_raw(sha) + return ShaFile.from_raw_string(type_num, uncomp)     def __iter__(self):   """Iterate over the SHAs that are present in this store."""   raise NotImplementedError(self.__iter__)     def add_object(self, obj):   """Add a single object to this object store.     """   raise NotImplementedError(self.add_object)     def add_objects(self, objects):   """Add a set of objects to this object store.     :param objects: Iterable over a list of objects.   
"""   raise NotImplementedError(self.add_objects)     def tree_changes(self, source, target, want_unchanged=False):   """Find the differences between the contents of two trees     :param object_store: Object store to use for retrieving tree contents   :param tree: SHA1 of the root tree   :param want_unchanged: Whether unchanged files should be reported   :return: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha)   """   todo = set([(source, target, "")])   while todo:   (sid, tid, path) = todo.pop()   if sid is not None:   stree = self[sid]   else:   stree = {}   if tid is not None:   ttree = self[tid]   else:   ttree = {}   for name, oldmode, oldhexsha in stree.iteritems():   oldchildpath = posixpath.join(path, name)   try:   (newmode, newhexsha) = ttree[name]   newchildpath = oldchildpath   except KeyError:   newmode = None   newhexsha = None   newchildpath = None   if (want_unchanged or oldmode != newmode or   oldhexsha != newhexsha):   if stat.S_ISDIR(oldmode):   if newmode is None or stat.S_ISDIR(newmode):   todo.add((oldhexsha, newhexsha, oldchildpath))   else:   # entry became a file   todo.add((oldhexsha, None, oldchildpath))   yield ((None, newchildpath), (None, newmode), (None, newhexsha))   else:   if newmode is not None and stat.S_ISDIR(newmode):   # entry became a dir   yield ((oldchildpath, None), (oldmode, None), (oldhexsha, None))   todo.add((None, newhexsha, newchildpath))   else:   yield ((oldchildpath, newchildpath), (oldmode, newmode), (oldhexsha, newhexsha))     for name, newmode, newhexsha in ttree.iteritems():   childpath = posixpath.join(path, name)   if not name in stree:   if not stat.S_ISDIR(newmode):   yield ((None, childpath), (None, newmode), (None, newhexsha))   else:   todo.add((None, newhexsha, childpath))     def iter_tree_contents(self, tree):   """Yield (path, mode, hexsha) tuples for all non-Tree objects in a tree.     
:param tree: SHA1 of the root of the tree   """   todo = set([(tree, "")])   while todo:   (tid, tpath) = todo.pop()   tree = self[tid]   for name, mode, hexsha in tree.iteritems():   path = posixpath.join(tpath, name)   if stat.S_ISDIR(mode):   todo.add((hexsha, path))   else:   yield path, mode, hexsha     def find_missing_objects(self, haves, wants, progress=None,   get_tagged=None):   """Find the missing objects required for a set of revisions.     :param haves: Iterable over SHAs already in common.   :param wants: Iterable over SHAs of objects to fetch.   :param progress: Simple progress function that will be called with   updated progress strings.   :param get_tagged: Function that returns a dict of pointed-to sha -> tag   sha for including tags.   :return: Iterator over (sha, path) pairs.   """   finder = MissingObjectFinder(self, haves, wants, progress, get_tagged)   return iter(finder.next, None)     def find_common_revisions(self, graphwalker):   """Find which revisions this store has in common using graphwalker.     :param graphwalker: A graphwalker object.   :return: List of SHAs that are in common   """   haves = []   sha = graphwalker.next()   while sha:   if sha in self:   haves.append(sha)   graphwalker.ack(sha)   sha = graphwalker.next()   return haves     def get_graph_walker(self, heads):   """Obtain a graph walker for this object store.     :param heads: Local heads to start search with   :return: GraphWalker object   """   return ObjectStoreGraphWalker(heads, lambda sha: self[sha].parents)     def generate_pack_contents(self, have, want):   """Iterate over the contents of a pack file.     
:param have: List of SHA1s of objects that should not be sent   :param want: List of SHA1s of objects that should be sent   """   return self.iter_shas(self.find_missing_objects(have, want))      class PackBasedObjectStore(BaseObjectStore):     def __init__(self):   self._pack_cache = None     def contains_packed(self, sha):   """Check if a particular object is present by SHA1 and is packed."""   for pack in self.packs:   if sha in pack:   return True   return False     def _load_packs(self):   raise NotImplementedError(self._load_packs)     def _pack_cache_stale(self):   """Check whether the pack cache is stale."""   raise NotImplementedError(self._pack_cache_stale)     def _add_known_pack(self, pack):   """Add a newly appeared pack to the cache by path.     """   if self._pack_cache is not None:   self._pack_cache.append(pack)     @property   def packs(self):   """List with pack objects."""   if self._pack_cache is None or self._pack_cache_stale():   self._pack_cache = self._load_packs()   return self._pack_cache     def _iter_loose_objects(self):   raise NotImplementedError(self._iter_loose_objects)     def _get_loose_object(self, sha):   raise NotImplementedError(self._get_loose_object)     def __iter__(self):   """Iterate over the SHAs that are present in this store."""   iterables = self.packs + [self._iter_loose_objects()]   return itertools.chain(*iterables)     def contains_loose(self, sha):   """Check if a particular object is present by SHA1 and is loose."""   return self._get_loose_object(sha) is not None     def get_raw(self, name):   """Obtain the raw text for an object. - +   :param name: sha for the object. - :return: tuple with object type and object contents. + :return: tuple with numeric type and object contents.   
"""   if len(name) == 40:   sha = hex_to_sha(name)   hexsha = name   elif len(name) == 20:   sha = name   hexsha = None   else:   raise AssertionError   for pack in self.packs:   try:   return pack.get_raw(sha)   except KeyError:   pass   if hexsha is None:   hexsha = sha_to_hex(name)   ret = self._get_loose_object(hexsha)   if ret is not None: - return ret.type, ret.as_raw_string() + return ret.type_num, ret.as_raw_string()   raise KeyError(hexsha)     def add_objects(self, objects):   """Add a set of objects to this object store.     :param objects: Iterable over objects, should support __len__.   """   if len(objects) == 0:   # Don't bother writing an empty pack file   return   f, commit = self.add_pack()   write_pack_data(f, objects, len(objects))   commit()      class DiskObjectStore(PackBasedObjectStore):   """Git-style object store that exists on disk."""     def __init__(self, path):   """Open an object store.     :param path: Path of the object store.   """   super(DiskObjectStore, self).__init__()   self.path = path   self.pack_dir = os.path.join(self.path, PACKDIR)   self._pack_cache_time = 0     def _load_packs(self):   pack_files = []   try:   self._pack_cache_time = os.stat(self.pack_dir).st_mtime   pack_dir_contents = os.listdir(self.pack_dir)   for name in pack_dir_contents:   # TODO: verify that idx exists first   if name.startswith("pack-") and name.endswith(".pack"):   filename = os.path.join(self.pack_dir, name)   pack_files.append((os.stat(filename).st_mtime, filename))   except OSError, e:   if e.errno == errno.ENOENT:   return []   raise   pack_files.sort(reverse=True)   suffix_len = len(".pack")   return [Pack(f[:-suffix_len]) for _, f in pack_files]     def _pack_cache_stale(self):   try:   return os.stat(self.pack_dir).st_mtime > self._pack_cache_time   except OSError, e:   if e.errno == errno.ENOENT:   return True   raise     def _get_shafile_path(self, sha):   dir = sha[:2]   file = sha[2:]   # Check from object dir   return 
os.path.join(self.path, dir, file)     def _iter_loose_objects(self):   for base in os.listdir(self.path):   if len(base) != 2:   continue   for rest in os.listdir(os.path.join(self.path, base)):   yield base+rest     def _get_loose_object(self, sha):   path = self._get_shafile_path(sha)   try:   return ShaFile.from_file(path)   except OSError, e:   if e.errno == errno.ENOENT:   return None   raise     def move_in_thin_pack(self, path):   """Move a specific file containing a pack into the pack directory.     :note: The file should be on the same file system as the   packs directory.     :param path: Path to the pack file.   """   data = PackData(path)     # Write index for the thin pack (do we really need this?)   temppath = os.path.join(self.pack_dir,   sha_to_hex(urllib2.randombytes(20))+".tempidx")   data.create_index_v2(temppath, self.get_raw)   p = Pack.from_objects(data, load_pack_index(temppath))     # Write a full pack version   temppath = os.path.join(self.pack_dir,   sha_to_hex(urllib2.randombytes(20))+".temppack")   write_pack(temppath, ((o, None) for o in p.iterobjects(self.get_raw)),   len(p))   pack_sha = load_pack_index(temppath+".idx").objects_sha1()   newbasename = os.path.join(self.pack_dir, "pack-%s" % pack_sha)   os.rename(temppath+".pack", newbasename+".pack")   os.rename(temppath+".idx", newbasename+".idx")   self._add_known_pack(Pack(newbasename))     def move_in_pack(self, path):   """Move a specific file containing a pack into the pack directory.     :note: The file should be on the same file system as the   packs directory.     :param path: Path to the pack file.   
"""   p = PackData(path)   entries = p.sorted_entries()   basename = os.path.join(self.pack_dir,   "pack-%s" % iter_sha1(entry[0] for entry in entries))   write_pack_index_v2(basename+".idx", entries, p.get_stored_checksum())   p.close()   os.rename(path, basename + ".pack")   self._add_known_pack(Pack(basename))     def add_thin_pack(self):   """Add a new thin pack to this object store.     Thin packs are packs that contain deltas with parents that exist   in a different pack.   """   fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")   f = os.fdopen(fd, 'wb')   def commit():   os.fsync(fd)   f.close()   if os.path.getsize(path) > 0:   self.move_in_thin_pack(path)   return f, commit     def add_pack(self):   """Add a new pack to this object store.     :return: Fileobject to write to and a commit function to   call when the pack is finished.   """   fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack")   f = os.fdopen(fd, 'wb')   def commit():   os.fsync(fd)   f.close()   if os.path.getsize(path) > 0:   self.move_in_pack(path)   return f, commit     def add_object(self, obj):   """Add a single object to this object store.     
:param obj: Object to add   """   dir = os.path.join(self.path, obj.id[:2])   try:   os.mkdir(dir)   except OSError, e:   if e.errno != errno.EEXIST:   raise   path = os.path.join(dir, obj.id[2:])   if os.path.exists(path):   return # Already there, no need to write again   f = GitFile(path, 'wb')   try:   f.write(obj.as_legacy_object())   finally:   f.close()      class MemoryObjectStore(BaseObjectStore):   """Object store that keeps all objects in memory."""     def __init__(self):   super(MemoryObjectStore, self).__init__()   self._data = {}     def contains_loose(self, sha):   """Check if a particular object is present by SHA1 and is loose."""   return sha in self._data     def contains_packed(self, sha):   """Check if a particular object is present by SHA1 and is packed."""   return False     def __iter__(self):   """Iterate over the SHAs that are present in this store."""   return self._data.iterkeys()     @property   def packs(self):   """List with pack objects."""   return []     def get_raw(self, name):   """Obtain the raw text for an object. - +   :param name: sha for the object. - :return: tuple with object type and object contents. + :return: tuple with numeric type and object contents.   """   return self[name].as_raw_string()     def __getitem__(self, name):   return self._data[name]     def add_object(self, obj):   """Add a single object to this object store.     """   self._data[obj.id] = obj     def add_objects(self, objects):   """Add a set of objects to this object store.     :param objects: Iterable over a list of objects.   """   for obj, path in objects:   self._data[obj.id] = obj      class ObjectImporter(object):   """Interface for importing objects."""     def __init__(self, count):   """Create a new ObjectImporter.     :param count: Number of objects that's going to be imported.   
"""   self.count = count     def add_object(self, object):   """Add an object."""   raise NotImplementedError(self.add_object)     def finish(self, object):   """Finish the imoprt and write objects to disk."""   raise NotImplementedError(self.finish)      class ObjectIterator(object):   """Interface for iterating over objects."""     def iterobjects(self):   raise NotImplementedError(self.iterobjects)      class ObjectStoreIterator(ObjectIterator):   """ObjectIterator that works on top of an ObjectStore."""     def __init__(self, store, sha_iter):   """Create a new ObjectIterator.     :param store: Object store to retrieve from   :param sha_iter: Iterator over (sha, path) tuples   """   self.store = store   self.sha_iter = sha_iter   self._shas = []     def __iter__(self):   """Yield tuple with next object and path."""   for sha, path in self.itershas():   yield self.store[sha], path     def iterobjects(self):   """Iterate over just the objects."""   for o, path in self:   yield o     def itershas(self):   """Iterate over the SHAs."""   for sha in self._shas:   yield sha   for sha in self.sha_iter:   self._shas.append(sha)   yield sha     def __contains__(self, needle):   """Check if an object is present.     :note: This checks if the object is present in   the underlying object store, not if it would   be yielded by the iterator.     :param needle: SHA1 of the object to check for   """   return needle in self.store     def __getitem__(self, key):   """Find an object by SHA1.     :note: This retrieves the object from the underlying   object store. It will also succeed if the object would   not be returned by the iterator.   """   return self.store[key]     def __len__(self):   """Return the number of objects."""   return len(list(self.itershas()))      def tree_lookup_path(lookup_obj, root_sha, path):   """Lookup an object in a Git tree.     
:param lookup_obj: Callback for retrieving object by SHA1   :param root_sha: SHA1 of the root tree   :param path: Path to lookup   """   parts = path.split("/")   sha = root_sha   mode = None   for p in parts:   obj = lookup_obj(sha) - if type(obj) is not Tree: + if not isinstance(obj, Tree):   raise NotTreeError(sha)   if p == '':   continue   mode, sha = obj[p]   return mode, sha      class MissingObjectFinder(object):   """Find the objects missing from another object store.     :param object_store: Object store containing at least all objects to be   sent   :param haves: SHA1s of commits not to send (already present in target)   :param wants: SHA1s of commits to send   :param progress: Optional function to report progress to.   :param get_tagged: Function that returns a dict of pointed-to sha -> tag   sha for including tags.   :param tagged: dict of pointed-to sha -> tag sha for including tags   """     def __init__(self, object_store, haves, wants, progress=None,   get_tagged=None):   self.sha_done = set(haves)   self.objects_to_send = set([(w, None, False) for w in wants if w not in haves])   self.object_store = object_store   if progress is None:   self.progress = lambda x: None   else:   self.progress = progress   self._tagged = get_tagged and get_tagged() or {}     def add_todo(self, entries):   self.objects_to_send.update([e for e in entries if not e[0] in self.sha_done])     def parse_tree(self, tree):   self.add_todo([(sha, name, not stat.S_ISDIR(mode)) for (mode, name, sha) in tree.entries() if not S_ISGITLINK(mode)])     def parse_commit(self, commit):   self.add_todo([(commit.tree, "", False)])   self.add_todo([(p, None, False) for p in commit.parents])     def parse_tag(self, tag):   self.add_todo([(tag.object[1], None, False)])     def next(self):   if not self.objects_to_send:   return None   (sha, name, leaf) = self.objects_to_send.pop()   if not leaf:   o = self.object_store[sha]   if isinstance(o, Commit):   self.parse_commit(o)   elif 
isinstance(o, Tree):   self.parse_tree(o)   elif isinstance(o, Tag):   self.parse_tag(o)   if sha in self._tagged:   self.add_todo([(self._tagged[sha], None, True)])   self.sha_done.add(sha)   self.progress("counting objects: %d\r" % len(self.sha_done))   return (sha, name)      class ObjectStoreGraphWalker(object):   """Graph walker that finds out what commits are missing from an object   store.     :ivar heads: Revisions without descendants in the local repo   :ivar get_parents: Function to retrieve parents in the local repo   """     def __init__(self, local_heads, get_parents):   """Create a new instance.     :param local_heads: Heads to start search with   :param get_parents: Function for finding the parents of a SHA1.   """   self.heads = set(local_heads)   self.get_parents = get_parents   self.parents = {}     def ack(self, sha):   """Ack that a revision and its ancestors are present in the source."""   if sha in self.heads:   self.heads.remove(sha)   if sha in self.parents:   for p in self.parents[sha]:   self.ack(p)     def next(self):   """Iterate over ancestors of heads in the target."""   if self.heads:   ret = self.heads.pop()   ps = self.get_parents(ret)   self.parents[ret] = ps   self.heads.update(ps)   return ret   return None
Change 1 of 21 Show Entire File dulwich/​objects.py Stacked
 
41
42
43
44
45
46
47
48
49
50
51
52
53
54
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
55
56
57
58
59
 
60
61
62
 
89
90
91
 
 
 
 
 
 
 
 
 
92
93
94
 
97
98
99
100
101
102
103
 
 
 
 
104
105
106
 
121
122
123
124
 
125
126
127
 
163
164
165
166
 
167
168
 
169
170
 
171
172
173
 
205
206
207
208
209
 
 
210
211
212
213
 
 
214
215
216
217
 
218
219
220
221
222
 
 
223
224
225
226
 
 
227
228
229
230
 
231
232
233
234
235
236
237
238
239
 
 
 
240
241
242
 
243
244
245
 
266
267
268
269
 
270
271
272
273
 
 
 
274
275
276
 
291
292
293
294
295
 
 
296
297
298
 
321
322
323
324
 
325
326
327
 
329
330
331
332
333
 
 
334
335
336
 
339
340
341
342
343
344
345
 
 
 
 
346
347
348
 
353
354
355
356
357
358
 
 
 
359
360
361
 
362
363
 
 
 
364
365
366
 
374
375
376
377
 
378
379
380
381
 
 
 
382
383
 
384
385
386
 
400
401
402
403
404
405
 
 
 
 
 
 
406
407
408
409
 
410
411
412
 
471
472
473
474
475
 
 
476
477
478
 
483
484
485
486
 
487
488
489
 
574
575
576
577
578
 
 
579
580
581
 
588
589
590
591
 
592
593
594
 
603
604
605
606
 
607
608
 
609
610
 
611
612
613
614
 
615
616
617
618
 
619
620
621
 
623
624
625
626
 
627
628
629
630
 
 
 
 
 
 
 
631
632
 
633
634
635
 
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
 
 
 
 
 
 
 
 
 
 
 
 
 
703
704
705
 
41
42
43
 
 
 
 
 
 
 
 
 
 
 
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
 
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
 
111
112
113
 
 
 
 
114
115
116
117
118
119
120
 
135
136
137
 
138
139
140
141
 
177
178
179
 
180
181
 
182
183
 
184
185
186
187
 
219
220
221
 
 
222
223
224
225
 
 
226
227
228
 
 
 
229
230
231
232
 
 
233
234
235
236
 
 
237
238
239
 
 
 
240
241
242
243
244
245
246
 
 
 
247
248
249
250
251
 
252
253
254
255
 
276
277
278
 
279
280
281
 
 
282
283
284
285
286
287
 
302
303
304
 
 
305
306
307
308
309
 
332
333
334
 
335
336
337
338
 
340
341
342
 
 
343
344
345
346
347
 
350
351
352
 
 
 
 
353
354
355
356
357
358
359
 
364
365
366
 
 
 
367
368
369
370
371
 
372
373
 
374
375
376
377
378
379
 
387
388
389
 
390
391
 
 
 
392
393
394
395
 
396
397
398
399
 
413
414
415
 
 
 
416
417
418
419
420
421
422
423
424
 
425
426
427
428
 
487
488
489
 
 
490
491
492
493
494
 
499
500
501
 
502
503
504
505
 
590
591
592
 
 
593
594
595
596
597
 
604
605
606
 
607
608
609
610
 
619
620
621
 
622
623
 
624
625
 
626
627
628
629
 
630
631
632
633
 
634
635
636
637
 
639
640
641
 
642
643
 
 
 
644
645
646
647
648
649
650
651
 
652
653
654
655
 
705
706
707
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
@@ -41,22 +41,27 @@
  make_sha,   )   -BLOB_ID = "blob" -TAG_ID = "tag" -TREE_ID = "tree" -COMMIT_ID = "commit" -PARENT_ID = "parent" -AUTHOR_ID = "author" -COMMITTER_ID = "committer" -OBJECT_ID = "object" -TYPE_ID = "type" -TAGGER_ID = "tagger" -ENCODING_ID = "encoding" + +# Header fields for commits +_TREE_HEADER = "tree" +_PARENT_HEADER = "parent" +_AUTHOR_HEADER = "author" +_COMMITTER_HEADER = "committer" +_ENCODING_HEADER = "encoding" + + +# Header fields for objects +_OBJECT_HEADER = "object" +_TYPE_HEADER = "type" +_TAG_HEADER = "tag" +_TAGGER_HEADER = "tagger" +    S_IFGITLINK = 0160000    def S_ISGITLINK(m):   return (stat.S_IFMT(m) == S_IFGITLINK) +    def _decompress(string):   dcomp = zlib.decompressobj() @@ -89,6 +94,15 @@
  return property(get, set, doc=docstring)     +def object_class(type): + """Get the object class corresponding to the given type. + + :param type: Either a type name string or a numeric type. + :return: The ShaFile subclass corresponding to the given type. + """ + return _TYPE_MAP[type] + +  class ShaFile(object):   """A git SHA file."""   @@ -97,10 +111,10 @@
  """Parse a legacy object, creating it and setting object._text"""   text = _decompress(map)   object = None - for posstype in type_map.keys(): - if text.startswith(posstype): - object = type_map[posstype]() - text = text[len(posstype):] + for cls in OBJECT_CLASSES: + if text.startswith(cls.type_name): + object = cls() + text = text[len(cls.type_name):]   break   assert object is not None, "%s is not a known object type" % text[:9]   assert text[0] == ' ', "%s is not a space" % text[0] @@ -121,7 +135,7 @@
    def as_legacy_object(self):   text = self.as_raw_string() - return zlib.compress("%s %d\0%s" % (self._type, len(text), text)) + return zlib.compress("%s %d\0%s" % (self.type_name, len(text), text))     def as_raw_chunks(self):   if self._needs_serialization: @@ -163,11 +177,11 @@
  used = 0   byte = ord(map[used])   used += 1 - num_type = (byte >> 4) & 7 + type_num = (byte >> 4) & 7   try: - object = num_type_map[num_type]() + object = object_class(type_num)()   except KeyError: - raise AssertionError("Not a known type: %d" % num_type) + raise AssertionError("Not a known type: %d" % type_num)   while (byte & 0x80) != 0:   byte = ord(map[used])   used += 1 @@ -205,41 +219,37 @@
  finally:   f.close()   - @classmethod - def from_raw_string(cls, type, string): + @staticmethod + def from_raw_string(type_num, string):   """Creates an object of the indicated type from the raw string given.   - Type is the numeric type of an object. String is the raw uncompressed - contents. + :param type_num: The numeric type of the object. + :param string: The raw uncompressed contents.   """ - real_class = num_type_map[type] - obj = real_class() - obj.type = type + obj = object_class(type_num)()   obj.set_raw_string(string)   return obj   - @classmethod - def from_raw_chunks(cls, type, chunks): + @staticmethod + def from_raw_chunks(type_num, chunks):   """Creates an object of the indicated type from the raw chunks given.   - Type is the numeric type of an object. Chunks is a sequence of the raw - uncompressed contents. + :param type_num: The numeric type of the object. + :param chunks: An iterable of the raw uncompressed contents.   """ - real_class = num_type_map[type] - obj = real_class() - obj.type = type + obj = object_class(type_num)()   obj.set_raw_chunks(chunks)   return obj     @classmethod   def from_string(cls, string):   """Create a blob from a string.""" - shafile = cls() - shafile.set_raw_string(string) - return shafile + obj = cls() + obj.set_raw_string(string) + return obj     def _header(self): - return "%s %lu\0" % (self._type, self.raw_length()) + return "%s %lu\0" % (self.type_name, self.raw_length())     def raw_length(self):   """Returns the length of the raw string of this object.""" @@ -266,11 +276,12 @@
  return self.sha().hexdigest()     def get_type(self): - return self._num_type + return self.type_num     def set_type(self, type): - self._num_type = type - + self.type_num = type + + # DEPRECATED: use type_num or type_name as needed.   type = property(get_type, set_type)     def __repr__(self): @@ -291,8 +302,8 @@
 class Blob(ShaFile):   """A Git Blob object."""   - _type = BLOB_ID - _num_type = 3 + type_name = 'blob' + type_num = 3     def __init__(self):   super(Blob, self).__init__() @@ -321,7 +332,7 @@
  @classmethod   def from_file(cls, filename):   blob = ShaFile.from_file(filename) - if blob._type != cls._type: + if not isinstance(blob, cls):   raise NotBlobError(filename)   return blob   @@ -329,8 +340,8 @@
 class Tag(ShaFile):   """A Git Tag object."""   - _type = TAG_ID - _num_type = 4 + type_name = 'tag' + type_num = 4     def __init__(self):   super(Tag, self).__init__() @@ -339,10 +350,10 @@
    @classmethod   def from_file(cls, filename): - blob = ShaFile.from_file(filename) - if blob._type != cls._type: - raise NotBlobError(filename) - return blob + tag = ShaFile.from_file(filename) + if not isinstance(tag, cls): + raise NotTagError(filename) + return tag     @classmethod   def from_string(cls, string): @@ -353,14 +364,16 @@
    def _serialize(self):   chunks = [] - chunks.append("%s %s\n" % (OBJECT_ID, self._object_sha)) - chunks.append("%s %s\n" % (TYPE_ID, num_type_map[self._object_type]._type)) - chunks.append("%s %s\n" % (TAG_ID, self._name)) + chunks.append("%s %s\n" % (_OBJECT_HEADER, self._object_sha)) + chunks.append("%s %s\n" % (_TYPE_HEADER, self._object_class.type_name)) + chunks.append("%s %s\n" % (_TAG_HEADER, self._name))   if self._tagger:   if self._tag_time is None: - chunks.append("%s %s\n" % (TAGGER_ID, self._tagger)) + chunks.append("%s %s\n" % (_TAGGER_HEADER, self._tagger))   else: - chunks.append("%s %s %d %s\n" % (TAGGER_ID, self._tagger, self._tag_time, format_timezone(self._tag_timezone))) + chunks.append("%s %s %d %s\n" % ( + _TAGGER_HEADER, self._tagger, self._tag_time, + format_timezone(self._tag_timezone)))   chunks.append("\n") # To close headers   chunks.append(self._message)   return chunks @@ -374,13 +387,13 @@
  if l == "":   break # empty line indicates end of headers   (field, value) = l.split(" ", 1) - if field == OBJECT_ID: + if field == _OBJECT_HEADER:   self._object_sha = value - elif field == TYPE_ID: - self._object_type = type_map[value] - elif field == TAG_ID: + elif field == _TYPE_HEADER: + self._object_class = object_class(value) + elif field == _TAG_HEADER:   self._name = value - elif field == TAGGER_ID: + elif field == _TAGGER_HEADER:   try:   sep = value.index("> ")   except ValueError: @@ -400,13 +413,16 @@
  self._message = f.read()     def _get_object(self): - """Returns the object pointed by this tag, represented as a tuple(type, sha)""" - self._ensure_parsed() - return (self._object_type, self._object_sha) + """Get the object pointed to by this tag. + + :return: tuple of (object class, sha). + """ + self._ensure_parsed() + return (self._object_class, self._object_sha)     def _set_object(self, value):   self._ensure_parsed() - (self._object_type, self._object_sha) = value + (self._object_class, self._object_sha) = value   self._needs_serialization = True     object = property(_get_object, _set_object) @@ -471,8 +487,8 @@
 class Tree(ShaFile):   """A Git tree object"""   - _type = TREE_ID - _num_type = 2 + type_name = 'tree' + type_num = 2     def __init__(self):   super(Tree, self).__init__() @@ -483,7 +499,7 @@
  @classmethod   def from_file(cls, filename):   tree = ShaFile.from_file(filename) - if tree._type != cls._type: + if not isinstance(tree, cls):   raise NotTreeError(filename)   return tree   @@ -574,8 +590,8 @@
 class Commit(ShaFile):   """A git commit object"""   - _type = COMMIT_ID - _num_type = 1 + type_name = 'commit' + type_num = 1     def __init__(self):   super(Commit, self).__init__() @@ -588,7 +604,7 @@
  @classmethod   def from_file(cls, filename):   commit = ShaFile.from_file(filename) - if commit._type != cls._type: + if not isinstance(commit, cls):   raise NotCommitError(filename)   return commit   @@ -603,19 +619,19 @@
  # Empty line indicates end of headers   break   (field, value) = l.split(" ", 1) - if field == TREE_ID: + if field == _TREE_HEADER:   self._tree = value - elif field == PARENT_ID: + elif field == _PARENT_HEADER:   self._parents.append(value) - elif field == AUTHOR_ID: + elif field == _AUTHOR_HEADER:   self._author, timetext, timezonetext = value.rsplit(" ", 2)   self._author_time = int(timetext)   self._author_timezone = parse_timezone(timezonetext) - elif field == COMMITTER_ID: + elif field == _COMMITTER_HEADER:   self._committer, timetext, timezonetext = value.rsplit(" ", 2)   self._commit_time = int(timetext)   self._commit_timezone = parse_timezone(timezonetext) - elif field == ENCODING_ID: + elif field == _ENCODING_HEADER:   self._encoding = value   else:   self._extra.append((field, value)) @@ -623,13 +639,17 @@
    def _serialize(self):   chunks = [] - chunks.append("%s %s\n" % (TREE_ID, self._tree)) + chunks.append("%s %s\n" % (_TREE_HEADER, self._tree))   for p in self._parents: - chunks.append("%s %s\n" % (PARENT_ID, p)) - chunks.append("%s %s %s %s\n" % (AUTHOR_ID, self._author, str(self._author_time), format_timezone(self._author_timezone))) - chunks.append("%s %s %s %s\n" % (COMMITTER_ID, self._committer, str(self._commit_time), format_timezone(self._commit_timezone))) + chunks.append("%s %s\n" % (_PARENT_HEADER, p)) + chunks.append("%s %s %s %s\n" % ( + _AUTHOR_HEADER, self._author, str(self._author_time), + format_timezone(self._author_timezone))) + chunks.append("%s %s %s %s\n" % ( + _COMMITTER_HEADER, self._committer, str(self._commit_time), + format_timezone(self._commit_timezone)))   if self.encoding: - chunks.append("%s %s\n" % (ENCODING_ID, self.encoding)) + chunks.append("%s %s\n" % (_ENCODING_HEADER, self.encoding))   for k, v in self.extra:   if "\n" in k or "\n" in v:   raise AssertionError("newline in extra data: %r -> %r" % (k, v)) @@ -685,21 +705,19 @@
  "Encoding of the commit message.")     -type_map = { - BLOB_ID : Blob, - TREE_ID : Tree, - COMMIT_ID : Commit, - TAG_ID: Tag, -} - -num_type_map = { - 0: None, - 1: Commit, - 2: Tree, - 3: Blob, - 4: Tag, - # 5 Is reserved for further expansion -} +OBJECT_CLASSES = ( + Commit, + Tree, + Blob, + Tag, + ) + +_TYPE_MAP = {} + +for cls in OBJECT_CLASSES: + _TYPE_MAP[cls.type_name] = cls + _TYPE_MAP[cls.type_num] = cls +    try:   # Try to import C versions
Change 1 of 2 Show Entire File dulwich/​pack.py Stacked
 
833
834
835
836
 
837
838
839
 
848
849
850
851
 
852
853
854
855
856
857
858
 
859
860
861
 
833
834
835
 
836
837
838
839
 
848
849
850
 
851
852
853
854
855
856
857
 
858
859
860
861
@@ -833,7 +833,7 @@
  # This helps us find good objects to diff against us   magic = []   for obj, path in recency: - magic.append( (obj.type, path, 1, -obj.raw_length(), obj) ) + magic.append( (obj.type_num, path, 1, -obj.raw_length(), obj) )   magic.sort()   # Build a map of objects and their index in magic - so we can find preceeding objects   # to diff against @@ -848,14 +848,14 @@
  f.write(struct.pack(">L", num_objects)) # Number of objects in pack   for o, path in recency:   sha1 = o.sha().digest() - orig_t = o.type + orig_t = o.type_num   raw = o.as_raw_string()   winner = raw   t = orig_t   #for i in range(offs[o]-window, window):   # if i < 0 or i >= len(offs): continue   # b = magic[i][4] - # if b.type != orig_t: continue + # if b.type_num != orig_t: continue   # base = b.as_raw_string()   # delta = create_delta(base, raw)   # if len(delta) < len(winner):
Change 1 of 4 Show Entire File dulwich/​repo.py Stacked
 
49
50
51
52
 
53
54
55
 
698
699
700
701
 
702
703
704
 
708
709
710
711
 
 
712
713
714
 
784
785
786
787
788
789
 
 
 
790
791
792
 
49
50
51
 
52
53
54
55
 
698
699
700
 
701
702
703
704
 
708
709
710
 
711
712
713
714
715
 
785
786
787
 
 
 
788
789
790
791
792
793
@@ -49,7 +49,7 @@
  Tag,   Tree,   hex_to_sha, - num_type_map, + object_class,   )  import warnings   @@ -698,7 +698,7 @@
  def _get_object(self, sha, cls):   assert len(sha) in (20, 40)   ret = self.get_object(sha) - if ret._type != cls._type: + if not isinstance(ret, cls):   if cls is Commit:   raise NotCommitError(ret)   elif cls is Blob: @@ -708,7 +708,8 @@
  elif cls is Tag:   raise NotTagError(ret)   else: - raise Exception("Type invalid: %r != %r" % (ret._type, cls._type)) + raise Exception("Type invalid: %r != %r" % ( + ret.type_name, cls.type_name))   return ret     def get_object(self, sha): @@ -784,9 +785,9 @@
  if cached is not None:   return cached   obj = self[ref] - obj_type = num_type_map[obj.type] - while obj_type == Tag: - obj_type, sha = obj.object + obj_class = object_class(obj.type_name) + while obj_class is Tag: + obj_class, sha = obj.object   obj = self.get_object(sha)   return obj.id  
 
183
184
185
186
 
187
188
189
 
190
191
192
 
193
194
195
 
285
286
287
288
 
183
184
185
 
186
187
188
 
189
190
191
 
192
193
194
195
 
285
286
287
 
@@ -183,13 +183,13 @@
  """Tests random access for non-delta objects"""   p = self.get_pack(pack1_sha)   obj = p[a_sha] - self.assertEqual(obj._type, 'blob') + self.assertEqual(obj.type_name, 'blob')   self.assertEqual(obj.sha().hexdigest(), a_sha)   obj = p[tree_sha] - self.assertEqual(obj._type, 'tree') + self.assertEqual(obj.type_name, 'tree')   self.assertEqual(obj.sha().hexdigest(), tree_sha)   obj = p[commit_sha] - self.assertEqual(obj._type, 'commit') + self.assertEqual(obj.type_name, 'commit')   self.assertEqual(obj.sha().hexdigest(), commit_sha)     def test_copy(self): @@ -285,4 +285,3 @@
  def test_simple_decompress(self):   self.assertEquals((["tree 4ada885c9196b6b6fa08744b5862bf92896fc002\nparent None\nauthor Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\ncommitter Jelmer Vernooij <jelmer@samba.org> 1228980214 +0000\n\nProvide replacement for mmap()'s offset argument."], 158, 'Z'),   read_zlib_chunks(StringIO(TEST_COMP1).read, 229)) -
 
92
93
94
95
 
96
97
98
99
100
 
 
101
102
103
104
 
105
106
107
 
109
110
111
112
113
 
 
114
115
116
 
119
120
121
122
 
123
124
125
 
128
129
130
131
 
132
133
 
134
135
136
 
147
148
149
150
 
151
152
153
 
 
154
155
156
 
190
191
192
193
 
194
195
 
196
197
198
 
92
93
94
 
95
96
97
98
 
 
99
100
101
102
103
 
104
105
106
107
 
109
110
111
 
 
112
113
114
115
116
 
119
120
121
 
122
123
124
125
 
128
129
130
 
131
132
 
133
134
135
136
 
147
148
149
 
150
151
 
 
152
153
154
155
156
 
190
191
192
 
193
194
 
195
196
197
198
@@ -92,16 +92,16 @@
  def test_head(self):   r = self._repo = open_repo('a.git')   self.assertEqual(r.head(), 'a90fa2d900a17e99b433217e988c4eb4a2e9a097') - +   def test_get_object(self):   r = self._repo = open_repo('a.git')   obj = r.get_object(r.head()) - self.assertEqual(obj._type, 'commit') - + self.assertEqual(obj.type_name, 'commit') +   def test_get_object_non_existant(self):   r = self._repo = open_repo('a.git')   self.assertRaises(KeyError, r.get_object, missing_sha) - +   def test_commit(self):   r = self._repo = open_repo('a.git')   warnings.simplefilter("ignore", DeprecationWarning) @@ -109,8 +109,8 @@
  obj = r.commit(r.head())   finally:   warnings.resetwarnings() - self.assertEqual(obj._type, 'commit') - + self.assertEqual(obj.type_name, 'commit') +   def test_commit_not_commit(self):   r = self._repo = open_repo('a.git')   warnings.simplefilter("ignore", DeprecationWarning) @@ -119,7 +119,7 @@
  r.commit, '4f2e6529203aa6d44b5af6e3292c837ceda003f9')   finally:   warnings.resetwarnings() - +   def test_tree(self):   r = self._repo = open_repo('a.git')   commit = r[r.head()] @@ -128,9 +128,9 @@
  tree = r.tree(commit.tree)   finally:   warnings.resetwarnings() - self.assertEqual(tree._type, 'tree') + self.assertEqual(tree.type_name, 'tree')   self.assertEqual(tree.sha().hexdigest(), commit.tree) - +   def test_tree_not_tree(self):   r = self._repo = open_repo('a.git')   warnings.simplefilter("ignore", DeprecationWarning) @@ -147,10 +147,10 @@
  tag = r.tag(tag_sha)   finally:   warnings.resetwarnings() - self.assertEqual(tag._type, 'tag') + self.assertEqual(tag.type_name, 'tag')   self.assertEqual(tag.sha().hexdigest(), tag_sha) - obj_type, obj_sha = tag.object - self.assertEqual(obj_type, objects.Commit) + obj_class, obj_sha = tag.object + self.assertEqual(obj_class, objects.Commit)   self.assertEqual(obj_sha, r.head())     def test_tag_not_tag(self): @@ -190,9 +190,9 @@
  blob = r.get_blob(blob_sha)   finally:   warnings.resetwarnings() - self.assertEqual(blob._type, 'blob') + self.assertEqual(blob.type_name, 'blob')   self.assertEqual(blob.sha().hexdigest(), blob_sha) - +   def test_get_blob_notblob(self):   r = self._repo = open_repo('a.git')   warnings.simplefilter("ignore", DeprecationWarning)
 
96
97
98
99
100
101
 
102
103
 
104
105
106
107
108
109
110
 
112
113
114
115
 
116
117
118
 
96
97
98
 
 
 
99
100
 
101
102
103
 
 
104
105
106
 
108
109
110
 
111
112
113
114
@@ -96,15 +96,11 @@
  self._environ['QUERY_STRING'] = ''     class TestTag(object): - type = Tag().type - - def __init__(self, sha, obj_type, obj_sha): + def __init__(self, sha, obj_class, obj_sha):   self.sha = lambda: sha - self.object = (obj_type, obj_sha) + self.object = (obj_class, obj_sha)     class TestBlob(object): - type = Blob().type -   def __init__(self, sha):   self.sha = lambda: sha   @@ -112,7 +108,7 @@
  blob2 = TestBlob('222')   blob3 = TestBlob('333')   - tag1 = TestTag('aaa', TestBlob.type, '222') + tag1 = TestTag('aaa', Blob, '222')     class TestRepo(object):   def __init__(self, objects, peeled):