Kiln » Dependencies » Dulwich

More docstrings, test coverage.

Changeset 15c10cbfeaf7
Parent 41b91297de19
by Jelmer Vernooij
Contained in master

Changes to 4 files. Showing diff from parent 41b91297de19.

dulwich/index.py
 
@@ -258,4 +258,9 @@
 
 
 def commit_index(object_store, index):
+    """Create a new tree from an index.
+
+    :param object_store: Object store to save the tree in
+    :param index: Index file
+    """
     return commit_tree(object_store, index.iterblobs())
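A minimal sketch of how commit_index is used, against a repository at a made-up path; it builds a tree object from whatever the index currently records and stores it in the repository's object store:

    import os

    from dulwich.index import Index, commit_index
    from dulwich.repo import Repo

    # Open an existing repository (the path here is hypothetical).
    repo = Repo("/path/to/repo")
    index = Index(os.path.join(repo.controldir(), "index"))

    # commit_index writes a tree for the blobs recorded in the index to
    # the given object store and returns the SHA1 of that tree.
    tree_id = commit_index(repo.object_store, index)
    print tree_id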
 
dulwich/object_store.py

@@ -132,8 +132,12 @@
         """
         return ObjectStoreGraphWalker(heads, lambda sha: self[sha].parents)
 
-
     def generate_pack_contents(self, have, want):
+        """Iterate over the contents of a pack file.
+
+        :param have: List of SHA1s of objects that should not be sent
+        :param want: List of SHA1s of objects that should be sent
+        """
         return self.iter_shas(self.find_missing_objects(have, want))
 
 
@@ -319,15 +323,17 @@
     def add_object(self, obj):
         """Add a single object to this object store.
 
+        :param obj: Object to add
         """
         self._add_shafile(obj.id, obj)
 
     def add_objects(self, objects):
         """Add a set of objects to this object store.
 
-        :param objects: Iterable over a list of objects.
+        :param objects: Iterable over objects, should support __len__.
         """
         if len(objects) == 0:
+            # Don't bother writing an empty pack file
             return
         f, commit = self.add_pack()
         write_pack_data(f, objects, len(objects))
@@ -335,12 +341,14 @@
 
 
 class MemoryObjectStore(BaseObjectStore):
+    """Object store that keeps all objects in memory."""
 
     def __init__(self):
         super(MemoryObjectStore, self).__init__()
         self._data = {}
 
     def __contains__(self, sha):
+        """Check if the object with a particular SHA is present."""
         return sha in self._data
 
     def __iter__(self):
@@ -403,19 +411,27 @@
     """ObjectIterator that works on top of an ObjectStore."""
 
     def __init__(self, store, sha_iter):
+        """Create a new ObjectIterator.
+
+        :param store: Object store to retrieve from
+        :param sha_iter: Iterator over (sha, path) tuples
+        """
         self.store = store
         self.sha_iter = sha_iter
         self._shas = []
 
     def __iter__(self):
+        """Yield tuple with next object and path."""
         for sha, path in self.itershas():
             yield self.store[sha], path
 
     def iterobjects(self):
+        """Iterate over just the objects."""
         for o, path in self:
             yield o
 
     def itershas(self):
+        """Iterate over the SHAs."""
         for sha in self._shas:
             yield sha
         for sha in self.sha_iter:
@@ -425,12 +441,21 @@
     def __contains__(self, needle):
         """Check if an object is present.
 
+        :note: This checks if the object is present in
+            the underlying object store, not if it would
+            be yielded by the iterator.
+
         :param needle: SHA1 of the object to check for
         """
         return needle in self.store
 
     def __getitem__(self, key):
-        """Find an object by SHA1."""
+        """Find an object by SHA1.
+
+        :note: This retrieves the object from the underlying
+            object store. It will also succeed if the object would
+            not be returned by the iterator.
+        """
         return self.store[key]
 
     def __len__(self):
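A rough sketch of how these pieces fit together, again against a repository at a hypothetical path; it relies only on the methods documented above (generate_pack_contents, the iterator protocol of ObjectStoreIterator, and its store-backed __contains__/__getitem__):

    from dulwich.repo import Repo

    # Open an existing repository (the path here is hypothetical).
    repo = Repo("/path/to/repo")
    store = repo.object_store
    head = repo.head()

    # generate_pack_contents returns an ObjectStoreIterator over every
    # object reachable from `want` that the other side does not already
    # `have`; with an empty `have` list that is everything behind HEAD.
    objects = store.generate_pack_contents(have=[], want=[head])

    # Iterating yields (object, path) tuples ...
    for obj, path in objects:
        print obj.id, path

    # ... while, as the new :note: docstrings point out, membership tests
    # and lookups go straight to the underlying object store.
    assert head in objects
    assert objects[head].id == head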
dulwich/objects.py

The only change in objects.py is in ShaFile._parse_legacy_object, where a bare assert becomes an explicit exception; the rest of the file (the ShaFile, Blob, Tag, Tree and Commit classes and their helpers) appears unchanged in the full-file view.

         size = 0
         i = 0
         while text[0] >= '0' and text[0] <= '9':
             if i > 0 and size == 0:
-                assert False, "Size is not in canonical format"
+                raise AssertionError("Size is not in canonical format")
             size = (size * 10) + int(text[0])
             text = text[1:]
             i += 1
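A small sketch of the code path that change touches, using dulwich's private parsing helpers purely for illustration (Python 2, matching the codebase of this era):

    import zlib

    from dulwich.objects import Blob, ShaFile

    # A "legacy" loose object is zlib-compressed "<type> <size>\0<contents>".
    blob = Blob.from_string("hello world")
    legacy = blob.as_legacy_object()

    # _parse_file sniffs the zlib header and hands legacy objects to
    # _parse_legacy_object, the method touched by this changeset.
    parsed = ShaFile._parse_file(legacy)
    assert isinstance(parsed, Blob)
    assert parsed.data == "hello world"

    # A size written with a leading zero is not canonical; the parser now
    # raises AssertionError explicitly instead of tripping `assert False`.
    bad = zlib.compress("blob 011\0hello world")
    try:
        ShaFile._parse_file(bad)
    except AssertionError:
        pass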
dulwich/pack.py
 
@@ -164,6 +164,14 @@
 
 
 def bisect_find_sha(start, end, sha, unpack_name):
+    """Find a SHA in a data blob with sorted SHAs.
+
+    :param start: Start index of range to search
+    :param end: End index of range to search
+    :param sha: Sha to find
+    :param unpack_name: Callback to retrieve SHA by index
+    :return: Index of the SHA, or None if it wasn't found
+    """
     assert start <= end
     while start <= end:
         i = (start + end)/2
@@ -220,6 +228,9 @@
         if name1 != name2:
             return False
         return True
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
 
     def close(self):
         self._file.close()
@@ -248,6 +259,7 @@
         raise NotImplementedError(self._unpack_crc32_checksum)
 
     def __iter__(self):
+        """Iterate over the SHAs in this pack."""
         return imap(sha_to_hex, self._itersha())
 
     def _itersha(self):
@@ -277,6 +289,7 @@
 
     def check(self):
         """Check that the stored checksum matches the actual checksum."""
+        # TODO: Check pack contents, too
         return self.calculate_checksum() == self.get_stored_checksum()
 
     def calculate_checksum(self):
@@ -434,7 +447,7 @@
     return type, uncomp, comp_len+raw_base
 
 
-def compute_object_size((num, obj)):
+def _compute_object_size((num, obj)):
     """Compute the size of a unresolved object for use with LRUSizeCache.
     """
     if num in (6, 7):
@@ -487,7 +500,7 @@
         self._file = open(self._filename, 'rb')
         self._read_header()
         self._offset_cache = LRUSizeCache(1024*1024*20,
-            compute_size=compute_object_size)
+            compute_size=_compute_object_size)
 
     def close(self):
         self._file.close()
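For a feel of bisect_find_sha, here is a toy illustration with made-up, already-sorted 20-byte strings standing in for the SHAs of a pack index:

    from dulwich.pack import bisect_find_sha

    # unpack_name is just an index-based accessor over the sorted names.
    shas = [chr(i) * 20 for i in range(5)]
    lookup = lambda i: shas[i]

    # Returns the index of the SHA when present ...
    assert bisect_find_sha(0, len(shas) - 1, shas[3], lookup) == 3
    # ... and None when it is not.
    assert bisect_find_sha(0, len(shas) - 1, "\xff" * 20, lookup) is None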