path: root/salvager/zkill.py
blob: 9f30dc15f9ea92bb5de4905b4d735b69b6c97653 (plain)
import re
from html.parser import HTMLParser

import requests


# Extracts the numeric ID from a victim's corporation or alliance link, e.g. "/corporation/98000001/".
OWNER_HREF = re.compile(r"/(?:corporation|alliance)/(\d+)/?")


class ZkillError(Exception):
	"""
	Marks a problem encountered when interfacing with Zkillboard's API.
	"""


class RelatedParser(HTMLParser):
	"""
	Reads kill IDs and teams from Zkillboard's related kills page.
	"""
	def __init__(self):
		super().__init__()
		self._team = 0
		self._kills = set()
		self._current = None

	def handle_starttag(self, tag, attrs):
		attrs = dict(attrs)
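		# Each "killlist" table on the related-kills page marks the start of a new team's kills.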
		if tag == "table" and attrs.get("id", "").lower() == "killlist":
			self._team += 1
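		# Kill rows carry their kill ID in the "killid" attribute; start a new entry for the current team.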
		if tag == "tr" and attrs.get("class", "").lower() == "killlistrow" and self._team > 0:
			self._flush()
			killid = attrs.get("killid", "")
			self._current = (killid, self._team, None)
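		# The first corporation/alliance link in the row gives the victim's owner ID, completing the entry.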
		if tag == "a" and self._team > 0 and self._current:
			match = OWNER_HREF.match(attrs.get("href", ""))
			if match:
				self._current = (*self._current[:2], match.group(1))
				self._flush()

	def _flush(self):
		if self._current and all(self._current):
			self._kills.add(self._current)
			self._current = None

	@property
	def kills(self):
		"""
	Returns all kills found by the parser along with their team and the victim's corporation or alliance ID.
		"""
		self._flush()
		return self._kills


def hash(kill_id):
	"""
	Looks up and returns the hash associated with *kill_id* using Zkillboard's API.
	"""
	response = requests.get("https://zkillboard.com/api/killID/{}/".format(kill_id))
	response.raise_for_status()
	data = response.json()
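	# A killID lookup should yield exactly one entry; anything else is treated as an error.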
	if len(data) == 0:
		raise ZkillError("Could not find hash", kill_id)
	if len(data) > 1:
		raise ZkillError("Too many hashes found", kill_id)
	return data[0]['zkb']['hash']


def parse_battle_report(url):
	"""
	Builds a basic snapshot containing all killmails from the battle report at *url*.
	"""
	response = requests.get(url)
	response.raise_for_status()
	page = response.text
	related = RelatedParser()
	related.feed(page)
	killmails = []
	teams = (set(), set())
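	# Group each victim's corporation/alliance ID under the side its killmail was listed on.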
	for kill, team, victim in related.kills:
		killmails.append({'killmail_id': int(kill)})
		destination = teams[team - 1]
		destination.add(int(victim))
	return {'killmails': killmails, 'teams': list(map(list, teams))}
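

# Example (manual test sketch): the URL below is a placeholder and must be replaced
# with a real zKillboard related-kills / battle-report page before running.
if __name__ == "__main__":
	import pprint

	snapshot = parse_battle_report("https://zkillboard.com/related/SYSTEM_ID/TIMESTAMP/")
	pprint.pprint(snapshot)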