# move compression functions to module dedup.compression
# [~helmut/debian-dedup.git] / importpkg.py
1 #!/usr/bin/python
2 """
3 CREATE TABLE package (package TEXT PRIMARY KEY, version TEXT, architecture TEXT);
4 CREATE TABLE content (package TEXT, filename TEXT, size INTEGER, function TEXT, hash TEXT, FOREIGN KEY (package) REFERENCES package(package));
5 CREATE TABLE dependency (package TEXT, required TEXT, FOREIGN KEY (package) REFERENCES package(package), FOREIGN KEY (required) REFERENCES package(package));
6 CREATE INDEX content_package_index ON content (package);
7 CREATE INDEX content_hash_index ON content (hash);
8 """
9
10 import hashlib
11 import sqlite3
12 import struct
13 import sys
14 import tarfile
15 import zlib
16
17 from debian.debian_support import version_compare
18 from debian import deb822
19 import lzma
20
21 from dedup.hashing import HashBlacklist, DecompressedHash, SuppressingHash, hash_file
22 from dedup.compression import GzipDecompressor, DecompressedStream
23
class ArReader(object):
    """Minimal streaming reader for Unix ar(1) archives (.deb files).

    Members are consumed strictly in order: call read_magic() once, then
    alternate read_entry()/read() until read_entry() raises EOFError.
    All names and data are handled as bytes.
    """
    global_magic = b"!<arch>\n"
    file_magic = b"`\n"

    def __init__(self, fileobj):
        """
        @param fileobj: file-like object providing read(size)
        """
        self.fileobj = fileobj
        self.remaining = None  # bytes left in current member; None before read_magic
        self.padding = 0       # 1 if current member has odd size (ar pads to 2 bytes)

    def skip(self, length):
        """Discard exactly length bytes from the underlying stream.

        @raises ValueError: if the stream ends before length bytes are read
        """
        while length:
            data = self.fileobj.read(min(4096, length))
            if not data:
                raise ValueError("archive truncated")
            length -= len(data)

    def read_magic(self):
        """Consume and verify the global ar header.

        @raises ValueError: if the stream does not start with the ar magic
        """
        data = self.fileobj.read(len(self.global_magic))
        if data != self.global_magic:
            raise ValueError("ar global header not found")
        self.remaining = 0

    def read_entry(self):
        """Skip the rest of the current member and return the next member's name.

        @rtype: bytes
        @raises EOFError: when the archive is exhausted
        @raises ValueError: on a malformed member header or missing padding
        """
        self.skip_current_entry()
        if self.padding:
            # members are 2-byte aligned; odd sizes are padded with "\n"
            # (bytes literal: must compare equal to what read() returns)
            if self.fileobj.read(1) != b'\n':
                raise ValueError("missing ar padding")
            self.padding = 0
        file_header = self.fileobj.read(60)
        if not file_header:
            raise EOFError("end of archive found")
        parts = struct.unpack("16s 12s 6s 6s 8s 10s 2s", file_header)
        # header fields are space padded; strip with a bytes separator so
        # this also works where struct.unpack yields bytes (Python 3)
        parts = [p.rstrip(b" ") for p in parts]
        if parts.pop() != self.file_magic:
            raise ValueError("ar file header not found")
        self.remaining = int(parts[5])  # decimal size field
        self.padding = self.remaining % 2
        return parts[0] # name

    def skip_current_entry(self):
        """Discard whatever is left of the current member."""
        self.skip(self.remaining)
        self.remaining = 0

    def read(self, length=None):
        """Read up to length bytes of the current member (all of it by default).

        Never reads past the member boundary; returns b"" at member end.
        """
        if length is None:
            length = self.remaining
        else:
            length = min(self.remaining, length)
        data = self.fileobj.read(length)
        self.remaining -= len(data)
        return data
75
class MultiHash(object):
    """Fan out identical input data to several hash objects at once."""

    def __init__(self, *hashes):
        """
        @param hashes: hash-like objects providing an update(data) method
        """
        self.hashes = hashes

    def update(self, data):
        """Forward data to every wrapped hash object."""
        for h in self.hashes:
            h.update(data)
83
# sha512 digests of file contents too trivial to be interesting for
# deduplication: the empty file and a file holding a single newline.
boring_sha512_hashes = {
    # ""
    "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
    # "\n"
    "be688838ca8686e5c90689bf2ab585cef1137c999b48c70b92f67a5c34dc15697b5d11c982ed6d71be1e1e7f7b4e0733884aa97c3f7a339a8ed03577cf74be09",
}
89
def sha512_nontrivial():
    """Return a fresh sha512 hash object that blacklists boring digests."""
    hashobj = hashlib.sha512()
    return HashBlacklist(hashobj, boring_sha512_hashes)
92
def gziphash():
    """Return a hash object named "gzip_sha512" hashing gzip-decompressed input.

    Decompression failures are suppressed (ValueError, zlib.error), and
    boring digests are blacklisted, so broken or trivial contents yield a
    falsy digest.
    """
    inner = DecompressedHash(GzipDecompressor(), hashlib.sha512())
    inner = SuppressingHash(inner, (ValueError, zlib.error))
    inner.name = "gzip_sha512"
    return HashBlacklist(inner, boring_sha512_hashes)
98
def get_hashes(tar):
    """Hash every regular member of a tarfile object.

    @param tar: an opened tarfile.TarFile
    @returns: an iterator of (name, size, hash function name, hexdigest)
        tuples; members whose digest is falsy (boring/broken) are omitted
    """
    for entry in tar:
        if not entry.isreg(): # excludes hard links as well
            continue
        multihash = hash_file(MultiHash(sha512_nontrivial(), gziphash()),
                              tar.extractfile(entry))
        for hashobj in multihash.hashes:
            digest = hashobj.hexdigest()
            if digest:
                yield (entry.name, entry.size, hashobj.name, digest)
109
def process_package(db, filelike):
    """Import one Debian binary package (.deb, an ar archive) into db.

    Reads the archive as a forward-only stream: control.tar.gz must occur
    before the data.tar.* member, as in a well-formed .deb.  If the
    database already holds a strictly newer version of the package,
    nothing is changed.  Commits on success.

    @param db: sqlite3 connection with the schema from the module docstring
    @param filelike: file-like object yielding the .deb contents
    @raises ValueError: on malformed archives or unexpected member order
    """
    cur = db.cursor()
    af = ArReader(filelike)
    af.read_magic()
    # state machine: "start" -> "control" (control.tar.gz seen)
    #                        -> "control_file" (./control parsed, package row written)
    state = "start"
    while True:
        try:
            name = af.read_entry()
        except EOFError:
            break
        if name == "control.tar.gz":
            if state != "start":
                raise ValueError("unexpected control.tar.gz")
            state = "control"
            # stream mode ("r|gz"): af is not seekable
            tf = tarfile.open(fileobj=af, mode="r|gz")
            for elem in tf:
                if elem.name != "./control":
                    continue
                if state != "control":
                    raise ValueError("duplicate control file")
                state = "control_file"
                control = tf.extractfile(elem).read()
                control = deb822.Packages(control)
                package = control["package"].encode("ascii")
                version = control["version"].encode("ascii")
                architecture = control["architecture"].encode("ascii")

                cur.execute("SELECT version FROM package WHERE package = ?;",
                            (package,))
                row = cur.fetchone()
                if row and version_compare(row[0], version) > 0:
                    return # already seen a newer package

                # replace any older rows for this package before re-inserting
                cur.execute("DELETE FROM package WHERE package = ?;",
                            (package,))
                cur.execute("DELETE FROM content WHERE package = ?;",
                            (package,))
                cur.execute("INSERT INTO package (package, version, architecture) VALUES (?, ?, ?);",
                            (package, version, architecture))
                # only record simple dependencies (no alternatives "a | b")
                depends = control.relations.get("depends", [])
                depends = set(dep[0]["name"].encode("ascii")
                              for dep in depends if len(dep) == 1)
                cur.execute("DELETE FROM dependency WHERE package = ?;",
                            (package,))
                cur.executemany("INSERT INTO dependency (package, required) VALUES (?, ?);",
                                ((package, dep) for dep in depends))
                break
            continue
        elif name == "data.tar.gz":
            tf = tarfile.open(fileobj=af, mode="r|gz")
        elif name == "data.tar.bz2":
            tf = tarfile.open(fileobj=af, mode="r|bz2")
        elif name == "data.tar.xz":
            # tarfile's stream mode has no xz support here; decompress manually
            zf = DecompressedStream(af, lzma.LZMADecompressor())
            tf = tarfile.open(fileobj=zf, mode="r|")
        else:
            # e.g. debian-binary; skipped implicitly by the next read_entry()
            continue
        if state != "control_file":
            raise ValueError("missing control file")
        for name, size, function, hexhash in get_hashes(tf):
            cur.execute("INSERT INTO content (package, filename, size, function, hash) VALUES (?, ?, ?, ?, ?);",
                        (package, name.decode("utf8"), size, function, hexhash))
        db.commit()
        return
    raise ValueError("data.tar not found")
175
def main():
    """Read a .deb package from stdin and import it into test.sqlite3."""
    database = sqlite3.connect("test.sqlite3")
    process_package(database, sys.stdin)
179
# run only when executed as a script, not on import
if __name__ == "__main__":
    main()