-
Notifications
You must be signed in to change notification settings - Fork 10
/
amprolla_update.py
executable file
·154 lines (127 loc) · 4.55 KB
/
amprolla_update.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
#!/usr/bin/env python3
# see LICENSE file for copyright and license details
"""
Perform incremental updates
"""
from os.path import join
from multiprocessing import Pool
from time import time
import requests
import lib.globalvars as globalvars
from lib.config import aliases, cpunm, repos, repo_order, spooldir, skips
from lib.lock import check_lock, free_lock
from lib.log import info, warn, die
from lib.parse import compare_dict, get_date, get_time, parse_release
from lib.net import download
from amprolla_merge import gen_release, merge, prepare_merge_dict
def remote_is_newer(remote, local):
    """
    Compare the timestamps of two Release files (remote vs. local).

    Both arguments are the full text of a Release file. Returns True
    when the remote file carries a strictly newer date, False otherwise.
    """
    remote_stamp = get_time(get_date(remote))
    local_stamp = get_time(get_date(local))

    is_newer = remote_stamp > local_stamp
    if is_newer:
        info('Remote Release is newer!')
    return is_newer
def perform_update(suite, paths):
    """
    Perform an incremental update and merge of a given suite.

    For each repository in repo_order, fetch the remote Release file and,
    if it is newer than the local copy, download the changed Packages.gz /
    Sources.gz indices, merge them, and regenerate the suite Release.

    suite: suite name (e.g. 'jessie')
    paths: list of local spool paths, one per entry in repo_order
           (falsy entries are skipped)

    Side effects: sets globalvars.suite / globalvars.rehash, downloads
    files into the spool, and writes merged indices and Release files.
    """
    info('Checking for updates in %s' % suite)
    globalvars.suite = suite
    globalvars.rehash = False

    needsmerge = {}
    needsmerge['downloads'] = []  # all files that have to be downloaded
    regenrelease = False
    cnt = 0
    for i in repo_order:
        # i = repository name
        needsmerge[i] = {}
        needsmerge[i]['mergelist'] = []

        if paths[cnt]:
            info('Working on %s repo' % i)
            remote_path = paths[cnt].replace(spooldir, repos[i]['host'])

            # Retry transient network failures in a loop; the previous
            # implementation recursed into perform_update(), which could
            # exhaust the stack on a persistently flaky connection.
            while True:
                try:
                    remote_rel = requests.get(join(remote_path, 'Release'))
                    break
                except requests.exceptions.ConnectionError as err:
                    warn('Caught exception: "%s". Retrying...' % err)

            # use a context manager so the file handle is not leaked
            with open(join(paths[cnt], 'Release')) as rel_file:
                local_rel_text = rel_file.read()

            diffs = {}
            if remote_is_newer(remote_rel.text, local_rel_text):
                download((join(remote_path, 'Release'),
                          join(paths[cnt], 'Release')))
                regenrelease = True

                diffs = compare_dict(parse_release(remote_rel.text),
                                     parse_release(local_rel_text))
            if diffs:
                globalvars.rehash = True
                for k in diffs:
                    # only package/source indices are fetched and merged
                    if k.endswith('Packages.gz') or k.endswith('Sources.gz'):
                        needsmerge[i]['mergelist'].append(k)
                        rmt = join(paths[cnt].replace(spooldir,
                                                      repos[i]['host']), k)
                        loc = join(paths[cnt], k)
                        dlf = (rmt, loc)
                        needsmerge['downloads'].append(dlf)

        cnt += 1

    # download what needs to be downloaded
    if needsmerge['downloads']:
        info('Downloading updates...')
        # context manager terminates the worker pool (was never closed)
        with Pool(cpunm) as dlpool:
            dlpool.map(download, needsmerge['downloads'])

    # create union of our Packages.gz and Sources.gz files we will merge
    uni = []
    for i in repo_order:
        uni.append(needsmerge[i]['mergelist'])
    updpkg_list = set().union(*uni)

    # make a list of package lists to feed into merge()
    merge_list = []
    for i in updpkg_list:
        pkgs = []
        for j in repo_order:
            sui = suite
            # append valid aliases
            if repos[j]['aliases']:
                if suite in aliases[repos[j]['name']]:
                    sui = aliases[repos[j]['name']][suite]
                elif repos[j]['skipmissing']:
                    sui = None
                # NOTE(review): nesting of this check under the aliases
                # branch reconstructed from flattened source — confirm
                if j == 'debian' and suite in skips:
                    sui = None

            if sui:
                pkgs.append(join(spooldir, repos[j]['dists'], sui, i))
            else:
                pkgs.append(None)
        merge_list.append(pkgs)

    # perform the actual merge
    if merge_list:
        info('Merging files...')
        with Pool(cpunm) as mrgpool:
            mrgpool.map(merge, merge_list)

    # generate Release files if we got any new files
    if needsmerge['downloads'] or regenrelease:
        info('Generating Release...')
        gen_release(suite)
def main():
    """
    Run the incremental update for every suite known to the merger.
    """
    merge_roots = prepare_merge_dict()
    for suite_name, suite_paths in merge_roots.items():
        perform_update(suite_name, suite_paths)
# break
if __name__ == '__main__':
    try:
        # time the whole run: lock, update, unlock
        start = time()
        check_lock()
        main()
        free_lock()
        finish = time()
        info('Total incremental update time: %s' % (finish - start),
             tofile=True)
    except Exception as e:
        # any failure is reported and aborts; the lock is deliberately
        # left in place on the error path
        die(e)