This file is indexed.

/usr/lib/update-notifier/package-data-downloader is in update-notifier-common 0.119ubuntu8.1.

This file is owned by root:root, with mode 0o755.

The actual contents of the file can be viewed below.

#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Process new requests to download per-package data"""
# Copyright (C) 2012 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glob
import os
import sys
import subprocess
import re
import traceback
import urllib
import debian.deb822
import string
import apt
import debconf
import time
from datetime import timedelta

DATADIR = "/usr/share/package-data-downloads/"
STAMPDIR = "/var/lib/update-notifier/package-data-downloads/"
NOTIFIER_SOURCE_FILE = "/usr/share/update-notifier/package-data-downloads-failed"
NOTIFIER_FILE = "/var/lib/update-notifier/user.d/data-downloads-failed"
NOTIFIER_PERMANENT_SOURCE_FILE = NOTIFIER_SOURCE_FILE + '-permanently'
NOTIFIER_PERMANENT_FILE = NOTIFIER_FILE + '-permanently'

failures = []
permanent_failures = []
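
# The hook files shipped in DATADIR are deb822-style control files.  As an
# illustration only (the field names are inferred from the parsing code
# below; the package, URL, script and debconf question shown here are
# hypothetical), a hook might look like:
#
#   Url: http://example.com/example-package-data.tar.gz
#   Sha256: <hex digest of the file named above>
#
#   Script: /usr/share/example-package/unpack-downloaded-data
#   Should-Download: example-package/accept-data-download
#
# Each Url/Sha256 stanza names a file to fetch and verify; the final Script
# stanza names the handler that is run with the downloaded files as
# arguments, and may reference a debconf question that can veto the download.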

def create_or_update_stampfile(file):
	"""Create or update the indicated stampfile, and remove failure flags"""

	try:
		with open(file, 'w'):
			pass
	# Ignore errors
	except Exception:
		traceback.print_exc(file=sys.stderr)

	os.utime(file, None)

	for ext in ('.failed', '.permanent-failure'):
		if os.path.exists(file + ext):
			os.unlink(file + ext)


def mark_hook_failed(hook_name, permanent=False):
	"""Create a stampfile recording that a hook failed

	We create separate stampfiles for failed hooks so we can
	keep track of how long the hook has been failing and if the failure
	should be considered permanent."""

	if permanent:
		filename = hook_name + '.permanent-failure'
	else:
		filename = hook_name + '.failed'

	failure_file = os.path.join(STAMPDIR, filename)
	try:
		with open(failure_file, 'w'):
			pass
		
	# Ignore errors
	except Exception:
		traceback.print_exc(file=sys.stderr)

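	# Remove the success stamp and any other failure marker so that only
	# the file written above records this hook's current state.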
	for ext in ('', '.failed', '.permanent-failure'):
		stampfile = hook_name + ext
		if filename != stampfile \
		   and os.path.exists(os.path.join(STAMPDIR, stampfile)):
			os.unlink(os.path.join(STAMPDIR, stampfile))


def hook_is_permanently_failed(hook_name):
	"""Check if this hook has been marked as permanently failing.

	If so, don't raise any more errors about it."""

	failure_file = os.path.join(STAMPDIR, hook_name + '.permanent-failure')
	return os.path.exists(failure_file)


def hook_aged_out(hook_name):
	"""Check if this hook has been failing consistently for >= 3 days"""

	failure_file = os.path.join(STAMPDIR, hook_name + '.failed')
	try:
		hook_date = os.stat(failure_file).st_ctime
		# st_ctime is seconds since the epoch, so compare it against
		# time.time() and express the difference in seconds.
		cur_time = time.time()
		d = timedelta(seconds=cur_time-hook_date)
		if d.days >= 3:
			return True
	except OSError:
		pass
	except Exception:
		traceback.print_exc(file=sys.stderr)
	return False


def record_failure(hook):
	"""Record that the named hook has failed"""
	if hook_aged_out(hook):
		permanent_failures.append(hook)
	else:
		failures.append(hook)


def existing_permanent_failures():
	"""Return the list of all previously recorded permanent failures"""

        files = glob.glob(os.path.join(STAMPDIR, "*.permanent-failure"))
        return [os.path.splitext(os.path.basename(path))[0] for path in files]


def trigger_update_notifier(failures, permanent=False):
	"""Tell update-notifier that there were failed packages"""

	try:
		if permanent:
                        with open(NOTIFIER_PERMANENT_SOURCE_FILE, 'r') as f:
                                input = f.read()
			output_file = open(NOTIFIER_PERMANENT_FILE, 'w')
		else:
                        with open(NOTIFIER_SOURCE_FILE, 'r') as f:
                                input = f.read()
			output_file = open(NOTIFIER_FILE, 'w')
	except Exception:
		# Things failed and we can't even notify about it.  Break the
		# trigger so that there's some error propagation, even if not
		# the most pleasant sort.
		traceback.print_exc(file=sys.stderr)
		sys.exit(1)

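	# Substitute the list of failing hook names into the notification
	# template and write it where update-notifier will pick it up.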
	packages = [os.path.basename(failure) for failure in failures]
        output_file.write(
                string.Template(input).substitute(
                        {'packages' : ", ".join(packages)}))
	output_file.close()

def get_hook_file_names():
	res = []
	for relfile in os.listdir(DATADIR):
		# ignore files ending in .dpkg-*
		if (os.path.splitext(relfile)[1] and
		    os.path.splitext(relfile)[1].startswith(".dpkg")):
			continue
		res.append(relfile)
	return res

def process_download_requests():
	"""Process requests to download package data files

	Iterate over /usr/share/package-data-downloads and download any
	package data specified in the contained file, then hand off to
	the indicated handler for further processing.

	Successful downloads are recorded in
	/var/lib/update-notifier/package-data-downloads to avoid unnecessary
	repeat handling.

	Failed downloads are reported to the user via the
	update-notifier interface."""

	# Get our proxy settings from the environment
	proxies = {}
	try:
		for proto in ('http','https','ftp'):
			try:
				proxies[proto] = os.environ[proto+"_proxy"]
			except KeyError:
				pass

		if proxies:
			urllib._urlopener = urllib.FancyURLopener(proxies)
	except Exception:
		pass

	# Iterate through all the available hooks.  If we get a failure
	# processing any of them (download failure, checksum failure, or
	# failure to run the hook script), record it but continue processing
	# the rest of the hooks since some of them may succeed.
	for relfile in get_hook_file_names():

		stampfile = os.path.join(STAMPDIR, relfile)
		file = os.path.join(DATADIR, relfile)
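		# Skip hooks whose stamp is newer than the hook file itself: they
		# were already handled since the hook last changed.  A missing
		# stamp simply means the hook still needs to run.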
		try:
			hook_date = os.stat(file).st_mtime
			stamp_date = os.stat(stampfile).st_mtime
			if hook_date < stamp_date:
				continue
		except Exception as e:
			if not isinstance(e, OSError):
				traceback.print_exc(file=sys.stderr)

		hook = debian.deb822.Deb822()
		files = []
		sums = []
		for para in hook.iter_paragraphs(open(file)):
			if para.has_key('script'):
				if not files:
					record_failure(relfile)
					break
				command = [para['script']]

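				# A hook may name a debconf question controlling whether the
				# download should happen at all; a stored answer of "false"
				# means the download was declined and the hook is skipped.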
				if para.has_key('should-download'):
					db = debconf.DebconfCommunicator('update-notifier')
					try:
						should = db.get(para['should-download'])
						if should == "false":
							# Do nothing with this file.
							break
					except:
						pass
					finally:
						db.shutdown()

				# Download each file and verify the sum
				try:
					for i in range(len(files)):
						print "%s: downloading %s" % (relfile, files[i])
						dest_file = urllib.urlretrieve(files[i])[0]
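						# sha256sum prints "<digest>  <filename>"; keep only
						# the digest for comparison with the hook's value.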
						output = subprocess.check_output(["sha256sum", dest_file])
						output = output.split(' ')[0]
						if output == sums[i]:
							command.append(dest_file)
						else:
							record_failure(relfile)
							break
					if relfile in failures + permanent_failures:
						break

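					# Every file downloaded and verified: hand them to the
					# hook's script as command-line arguments.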
					sys.stdout.flush()
					result = subprocess.call(command)
					if result:
						# There's no sense redownloading if the script fails
						permanent_failures.append(relfile)
					else:
						create_or_update_stampfile(stampfile)
					break
				except Exception:
					traceback.print_exc(file=sys.stderr)

				record_failure(relfile)
				# The 'script' is always the last stanza
				break

			# Not in a 'script' stanza, so we should have some urls
			try:
				files.append(para['url'])
				sums.append(para['sha256'])
			except Exception as e:
				if not isinstance(e, KeyError):
					traceback.print_exc(file=sys.stderr)
				record_failure(relfile)
				break

	previous_failures = existing_permanent_failures()

	# We only report about "permanent" failures when there are new ones,
	# but we want the whole list of permanently-failing hooks so when
	# we clobber the update-notifier file we don't lose information the
	# user may not have seen yet
	if permanent_failures:
		new_failures = False
		for failure in permanent_failures:
			if failure not in previous_failures:
				mark_hook_failed(failure, permanent=True)
				previous_failures.append(failure)
				new_failures = True
		if new_failures:
			trigger_update_notifier(previous_failures, permanent=True)

	# Filter out new failure reports for permanently-failed packages
	our_failures = [x for x in failures if x not in previous_failures]

	if our_failures:
		for failure in our_failures:
			mark_hook_failed(failure)
		trigger_update_notifier(our_failures)


if __name__ == "__main__":
	process_download_requests()