amprolla_update.py - amprolla - devuan's apt repo merger
HTML git clone https://git.parazyd.org/amprolla
DIR Log
DIR Files
DIR Refs
DIR README
DIR LICENSE
---
amprolla_update.py (4659B)
---
1 #!/usr/bin/env python3
2 # see LICENSE file for copyright and license details
3
4 """
5 Perform incremental updates
6 """
7
8 from os.path import join
9 from multiprocessing import Pool
10 from time import time
11 import requests
12
13 import lib.globalvars as globalvars
14 from lib.config import aliases, cpunm, repos, repo_order, spooldir, skips
15 from lib.lock import check_lock, free_lock
16 from lib.log import info, warn, die
17 from lib.parse import compare_dict, get_date, get_time, parse_release
18 from lib.net import download
19 from amprolla_merge import gen_release, merge, prepare_merge_dict
20
21
def remote_is_newer(remote, local):
    """
    Return True when the remote Release file carries a later timestamp
    than the local one, logging the fact; return False otherwise.
    """
    remote_time = get_time(get_date(remote))
    local_time = get_time(get_date(local))

    if remote_time > local_time:
        info('Remote Release is newer!')
        return True

    return False
37
38
def perform_update(suite, paths):
    """
    Perform an incremental update and merge of a given suite.

    suite: suite name to update (set on globalvars for the merge step)
    paths: per-repo local spool paths, ordered like repo_order; falsy
           entries mean the repo has no tree for this suite and are skipped

    Side effects: downloads updated index files into the spool, runs
    merge() over changed Packages.gz/Sources.gz lists, and regenerates
    the merged Release file when anything changed.
    """
    info('Checking for updates in %s' % suite)
    globalvars.suite = suite
    globalvars.rehash = False

    needsmerge = {}
    needsmerge['downloads'] = []  # (remote, local) pairs to download
    regenrelease = False
    cnt = 0
    for i in repo_order:
        # i = repository name
        needsmerge[i] = {}
        needsmerge[i]['mergelist'] = []

        if paths[cnt]:
            info('Working on %s repo' % i)
            remote_path = paths[cnt].replace(spooldir, repos[i]['host'])
            try:
                remote_rel = requests.get(join(remote_path, 'Release'))
            except requests.exceptions.ConnectionError as err:
                # NOTE(review): retry-by-recursion restarts the whole suite
                # and is unbounded on a persistent outage — kept as-is since
                # callers rely on this best-effort behavior; confirm intent.
                warn('Caught exception: "%s". Retrying...' % err)
                return perform_update(suite, paths)

            # use a context manager so the Release handle is not leaked
            with open(join(paths[cnt], 'Release')) as relfile:
                local_rel_text = relfile.read()

            if remote_is_newer(remote_rel.text, local_rel_text):
                download((join(remote_path, 'Release'),
                          join(paths[cnt], 'Release')))
                regenrelease = True

            diffs = compare_dict(parse_release(remote_rel.text),
                                 parse_release(local_rel_text))
            if diffs:
                globalvars.rehash = True
                for k in diffs:
                    # only package/source indices are merged; everything
                    # that differs is still (re)downloaded below
                    if k.endswith('Packages.gz') or k.endswith('Sources.gz'):
                        needsmerge[i]['mergelist'].append(k)
                    rmt = join(paths[cnt].replace(spooldir,
                                                  repos[i]['host']), k)
                    loc = join(paths[cnt], k)
                    needsmerge['downloads'].append((rmt, loc))

        cnt += 1

    # download what needs to be downloaded
    if needsmerge['downloads']:
        info('Downloading updates...')
        # context manager terminates/joins the workers instead of leaking them
        with Pool(cpunm) as dlpool:
            dlpool.map(download, needsmerge['downloads'])

    # union of the Packages.gz and Sources.gz files we will merge
    updpkg_list = set().union(
        *(needsmerge[i]['mergelist'] for i in repo_order))

    # make a list of package lists to feed into merge()
    merge_list = []
    for i in updpkg_list:
        pkgs = []
        for j in repo_order:
            sui = suite
            # resolve valid suite aliases per repo
            if repos[j]['aliases']:
                if suite in aliases[repos[j]['name']]:
                    sui = aliases[repos[j]['name']][suite]
                elif repos[j]['skipmissing']:
                    sui = None
                if j == 'debian' and suite in skips:
                    sui = None

            if sui:
                pkgs.append(join(spooldir, repos[j]['dists'], sui, i))
            else:
                pkgs.append(None)

        merge_list.append(pkgs)

    # perform the actual merge
    if merge_list:
        info('Merging files...')
        with Pool(cpunm) as mrgpool:
            mrgpool.map(merge, merge_list)

    # generate Release files if we got any new files
    if needsmerge['downloads'] or regenrelease:
        info('Generating Release...')
        gen_release(suite)
133
134
def main():
    """
    Run the incremental update for every suite in the merge dict.
    """
    for suite, paths in prepare_merge_dict().items():
        perform_update(suite, paths)
142 # break
143
144
if __name__ == '__main__':
    try:
        started = time()
        check_lock()
        main()
        # NOTE(review): the lock is only freed on success; on failure it
        # stays in place (presumably to block further runs) — confirm.
        free_lock()
        finished = time()
        info('Total incremental update time: %s' % (finished - started),
             tofile=True)
    except Exception as e:
        die(e)