From 7792ae70fcca12203bda5e3675fd3460d26a3b58 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Fri, 28 Apr 2017 01:01:18 -0600 Subject: [PATCH 01/38] WIP --- monocle/bounds.py | 97 +++-------------------- monocle/db.py | 24 +++--- monocle/overseer.py | 43 +++++----- monocle/sanitized.py | 8 +- monocle/spawns.py | 162 ++++++++++++-------------------------- optional-requirements.txt | 6 +- requirements.txt | 6 +- 7 files changed, 107 insertions(+), 239 deletions(-) diff --git a/monocle/bounds.py b/monocle/bounds.py index 9a3117cd7..c7472c8a3 100644 --- a/monocle/bounds.py +++ b/monocle/bounds.py @@ -4,91 +4,20 @@ from .utils import get_distance -class Bounds: - def __init__(self): - self.north = max(conf.MAP_START[0], conf.MAP_END[0]) - self.south = min(conf.MAP_START[0], conf.MAP_END[0]) - self.east = max(conf.MAP_START[1], conf.MAP_END[1]) - self.west = min(conf.MAP_START[1], conf.MAP_END[1]) - self.center = ((self.north + self.south) / 2, - (self.west + self.east) / 2) - self.multi = False +if conf.MULTI_BOUNDARIES: + from pogeo import Polygon - def __bool__(self): - """Are boundaries a polygon?""" - return False + sys.modules[__name__] = Polygon(conf.MULTI_BOUNDARIES, conf.HOLES) +elif conf.BOUNDARIES: + if conf.HOLES: + from pogeo import Polygon - def __contains__(self, p): - return True - - def __hash__(self): - return 0 - - @property - def area(self): - """Returns the square kilometers for configured scan area""" - width = get_distance((self.center[0], self.west), (self.center[0], self.east), 2) - height = get_distance((self.south, 0), (self.north, 0), 2) - return round(width * height) - - -class PolyBounds(Bounds): - def __init__(self, polygon=conf.BOUNDARIES): - self.boundaries = prep(polygon) - self.south, self.west, self.north, self.east = polygon.bounds - self.center = polygon.centroid.coords[0] - self.multi = False - self.polygon = polygon - - def __bool__(self): - """Are boundaries a polygon?""" - return True - - def __contains__(self, p): - 
return self.boundaries.contains(Point(p)) - - def __hash__(self): - return hash((self.south, self.west, self.north, self.east)) - - -class MultiPolyBounds(PolyBounds): - def __init__(self): - super().__init__() - self.multi = True - self.polygons = [PolyBounds(polygon) for polygon in self.polygon] - - def __hash__(self): - return hash(tuple(hash(x) for x in self.polygons)) - - @property - def area(self): - return sum(x.area for x in self.polygons) - - -class RectBounds(Bounds): - def __contains__(self, p): - lat, lon = p - return (self.south <= lat <= self.north and - self.west <= lon <= self.east) - - def __hash__(self): - return hash((self.north, self.east, self.south, self.west)) - - -if conf.BOUNDARIES: - try: - from shapely.geometry import MultiPolygon, Point, Polygon - from shapely.prepared import prep - except ImportError as e: - raise ImportError('BOUNDARIES is set but shapely is not available.') from e - - if isinstance(conf.BOUNDARIES, Polygon): - sys.modules[__name__] = PolyBounds() - elif isinstance(conf.BOUNDARIES, MultiPolygon): - sys.modules[__name__] = MultiPolyBounds() + sys.modules[__name__] = Polygon(conf.BOUNDARIES, conf.HOLES) else: - raise TypeError('BOUNDARIES must be a shapely Polygon.') -elif conf.STAY_WITHIN_MAP: - sys.modules[__name__] = RectBounds() + from pogeo import Loop + + sys.modules[__name__] = Loop(conf.BOUNDARIES) else: - sys.modules[__name__] = Bounds() + from pogeo import Rectangle + + sys.modules[__name__] = Rectangle(conf.MAP_START, conf.MAP_END, conf.STAY_WITHIN_MAP) diff --git a/monocle/db.py b/monocle/db.py index bcb82750f..c58ec46ca 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -3,6 +3,7 @@ from contextlib import contextmanager from enum import Enum from time import time, mktime +from hashlib import sha256 from sqlalchemy import Column, Integer, String, Float, SmallInteger, BigInteger, ForeignKey, UniqueConstraint, create_engine, cast, func, desc, asc, and_, exists from sqlalchemy.orm import sessionmaker, 
relationship @@ -15,6 +16,8 @@ from .utils import time_until_time, dump_pickle, load_pickle from .shared import call_at, get_logger +contains_spawn = bounds.contains_cellid if conf.SPAWN_ID_INT else bounds.contains_token + try: assert conf.LAST_MIGRATION < time() except AssertionError: @@ -175,7 +178,7 @@ def __contains__(self, sighting): def pickle(self): state = self.__dict__.copy() - state['db_hash'] = spawns.db_hash + state['db_hash'] = DB_HASH state['bounds_hash'] = hash(bounds) dump_pickle('forts', state) @@ -183,7 +186,7 @@ def unpickle(self): try: state = load_pickle('forts', raise_exception=True) if all((state['class_version'] == self.class_version, - state['db_hash'] == spawns.db_hash, + state['db_hash'] == DB_HASH, state['bounds_hash'] == hash(bounds))): self.__dict__.update(state) except (FileNotFoundError, TypeError, KeyError): @@ -199,7 +202,7 @@ def unpickle(self): _engine = create_engine(conf.DB_ENGINE) Session = sessionmaker(bind=_engine) DB_TYPE = _engine.name - +DB_HASH = sha256(conf.DB_ENGINE.encode()).digest() if conf.REPORT_SINCE: SINCE_TIME = mktime(conf.REPORT_SINCE.timetuple()) @@ -216,8 +219,6 @@ class Sighting(Base): spawn_id = Column(ID_TYPE) expire_timestamp = Column(Integer, index=True) encounter_id = Column(HUGE_TYPE, index=True) - lat = Column(FLOAT_TYPE) - lon = Column(FLOAT_TYPE) atk_iv = Column(TINY_TYPE) def_iv = Column(TINY_TYPE) sta_iv = Column(TINY_TYPE) @@ -240,8 +241,6 @@ class Mystery(Base): pokemon_id = Column(TINY_TYPE) spawn_id = Column(ID_TYPE, index=True) encounter_id = Column(HUGE_TYPE, index=True) - lat = Column(FLOAT_TYPE) - lon = Column(FLOAT_TYPE) first_seen = Column(Integer, index=True) first_seconds = Column(SmallInteger) last_seconds = Column(SmallInteger) @@ -267,8 +266,6 @@ class Spawnpoint(Base): id = Column(Integer, primary_key=True) spawn_id = Column(ID_TYPE, unique=True, index=True) despawn_time = Column(SmallInteger, index=True) - lat = Column(FLOAT_TYPE) - lon = Column(FLOAT_TYPE) updated = 
Column(Integer, index=True) duration = Column(TINY_TYPE) failures = Column(TINY_TYPE) @@ -372,7 +369,7 @@ def add_spawnpoint(session, pokemon): .first() now = round(time()) point = pokemon['lat'], pokemon['lon'] - spawns.add_known(spawn_id, new_time, point) + spawns.add_known(spawn_id, new_time) if existing: existing.updated = now existing.failures = 0 @@ -405,8 +402,7 @@ def add_spawnpoint(session, pokemon): def add_mystery_spawnpoint(session, pokemon): # Check if the same entry already exists spawn_id = pokemon['spawn_id'] - point = pokemon['lat'], pokemon['lon'] - if point in spawns.unknown or session.query(exists().where( + if spawn_id in spawns.unknown or session.query(exists().where( Spawnpoint.spawn_id == spawn_id)).scalar(): return @@ -420,8 +416,8 @@ def add_mystery_spawnpoint(session, pokemon): failures=0 )) - if point in bounds: - spawns.add_unknown(point) + if Location(pokemon['lat'], pokemon['lon']) in bounds: + spawns.unknowns.add(spawn_id) def add_mystery(session, pokemon): diff --git a/monocle/overseer.py b/monocle/overseer.py index 8293f1c27..87ef0eb76 100755 --- a/monocle/overseer.py +++ b/monocle/overseer.py @@ -16,6 +16,12 @@ from . 
import bounds, db_proc, spawns, sanitized as conf from .worker import Worker +if conf.SPAWN_ID_INT: + from pogeo import diagonal_distance, cellid_to_location as spawnid_to_loc +else: + from pogeo import diagonal_distance, token_to_location as spawnid_to_loc + + ANSI = '\x1b[2J\x1b[H' if platform == 'win32': try: @@ -170,11 +176,11 @@ def update_stats(self, refresh=conf.STAT_REFRESH, med=median, count=conf.GRID[0] self.update_coroutines_count() self.counts = ( - 'Known spawns: {}, unknown: {}, more: {}\n' + 'Known spawns: {}, unknown: {}\n' '{} workers, {} coroutines\n' 'sightings cache: {}, mystery cache: {}, DB queue: {}\n' ).format( - len(spawns), len(spawns.unknown), spawns.cells_count, + len(spawns), len(spawns.unknown), count, self.coroutines_count, len(SIGHTING_CACHE), len(MYSTERY_CACHE), len(db_proc) ) @@ -316,12 +322,12 @@ def get_start_point(self): now = time() % 3600 closest = None - for spawn_id, spawn_time in spawns.known.values(): + for spawn_id, spawn_time in spawns.known.items(): time_diff = now - spawn_time - if 0 < time_diff < smallest_diff: + if 0.0 < time_diff < smallest_diff: smallest_diff = time_diff closest = spawn_id - if smallest_diff < 3: + if smallest_diff < 3.0: break return closest @@ -358,7 +364,7 @@ async def launch(self, bootstrap, pickle): return update_spawns = False - self.mysteries = spawns.mystery_gen() + self.mysteries = iter(spawns.unknown.copy()) while True: try: await self._launch(update_spawns) @@ -383,7 +389,7 @@ async def _launch(self, update_spawns): start_point = self.get_start_point() if start_point and not spawns.after_last(): spawns_iter = dropwhile( - lambda s: s[1][0] != start_point, spawns.items()) + lambda s: s[0] != start_point, spawns.items()) else: spawns_iter = iter(spawns.items()) @@ -393,7 +399,7 @@ async def _launch(self, update_spawns): captcha_limit = conf.MAX_CAPTCHAS skip_spawn = conf.SKIP_SPAWN - for point, (spawn_id, spawn_seconds) in spawns_iter: + for spawn_id, spawn_seconds in spawns_iter: try: if 
self.captcha_queue.qsize() > captcha_limit: self.paused = True @@ -408,15 +414,15 @@ async def _launch(self, update_spawns): # positive = already happened time_diff = time() - spawn_time - while time_diff < 0.5: + while time_diff < 0.4: try: - mystery_point = next(self.mysteries) + mystery_id = next(self.mysteries) await self.coroutine_semaphore.acquire() - LOOP.create_task(self.try_point(mystery_point)) + LOOP.create_task(self.try_point(mystery_id)) except StopIteration: if self.next_mystery_reload < monotonic(): - self.mysteries = spawns.mystery_gen() + self.mysteries = iter(spawns.unknown.copy()) self.next_mystery_reload = monotonic() + conf.RESCAN_UNKNOWN else: await sleep(min(spawn_time - time() + .5, self.next_mystery_reload - monotonic()), loop=LOOP) @@ -499,11 +505,11 @@ async def bootstrap_try(point): tasks = (bootstrap_try(x) for x in get_bootstrap_points(bounds)) await gather(*tasks, loop=LOOP) - async def try_point(self, point, spawn_time=None, spawn_id=None): + async def try_spawn(self, spawn_id, spawn_time=None, _jitter=diagonal_distance(bounds.center, 50.0 if conf.ENCOUNTER else 65.0)): try: - point = randomize_point(point) - skip_time = monotonic() + (conf.GIVE_UP_KNOWN if spawn_time else conf.GIVE_UP_UNKNOWN) - worker = await self.best_worker(point, skip_time) + location = spawnid_to_loc(spawn_id) + location.jitter(_jitter) + worker = await self.best_worker(location, monotonic() + (conf.GIVE_UP_KNOWN if spawn_time else conf.GIVE_UP_UNKNOWN)) if not worker: if spawn_time: self.skipped += 1 @@ -521,8 +527,7 @@ async def try_point(self, point, spawn_time=None, spawn_id=None): finally: self.coroutine_semaphore.release() - async def best_worker(self, point, skip_time): - good_enough = conf.GOOD_ENOUGH + async def best_worker(self, location, skip_time, _good_enough=conf.GOOD_ENOUGH): while self.running: gen = (w for w in self.workers if not w.busy.locked()) try: @@ -535,7 +540,7 @@ async def best_worker(self, point, skip_time): if speed < lowest_speed: 
lowest_speed = speed worker = w - if speed < good_enough: + if speed < _good_enough: break if lowest_speed < conf.SPEED_LIMIT: worker.speed = lowest_speed diff --git a/monocle/sanitized.py b/monocle/sanitized.py index 05aff696e..75a5a94a1 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -28,7 +28,7 @@ 'AREA_NAME': str, 'AUTHKEY': bytes, 'BOOTSTRAP_RADIUS': Number, - 'BOUNDARIES': object, + 'BOUNDARIES': tuple, 'CACHE_CELLS': bool, 'CAPTCHAS_ALLOWED': int, 'CAPTCHA_KEY': str, @@ -54,6 +54,7 @@ 'HASHTAGS': set_sequence, 'HASH_KEY': (str,) + set_sequence, 'HEATMAP': bool, + 'HOLES': tuple, 'IGNORE_IVS': bool, 'IGNORE_RARITY': bool, 'IMAGE_STATS': bool, @@ -79,8 +80,8 @@ 'MAX_RETRIES': int, 'MINIMUM_RUNTIME': Number, 'MINIMUM_SCORE': Number, - 'MORE_POINTS': bool, 'MOVE_FONT': str, + 'MULTI_BOUNDARIES': tuple, 'NAME_FONT': str, 'NEVER_NOTIFY_IDS': set_sequence_range, 'NOTIFY': bool, @@ -162,6 +163,7 @@ 'GOOD_ENOUGH': 0.1, 'GOOGLE_MAPS_KEY': '', 'HASHTAGS': None, + 'HOLES': None, 'IGNORE_IVS': False, 'IGNORE_RARITY': False, 'IMAGE_STATS': False, @@ -184,8 +186,8 @@ 'MAX_CAPTCHAS': 0, 'MAX_RETRIES': 3, 'MINIMUM_RUNTIME': 10, - 'MORE_POINTS': False, 'MOVE_FONT': 'sans-serif', + 'MULTI_BOUNDARIES': None, 'NAME_FONT': 'sans-serif', 'NEVER_NOTIFY_IDS': (), 'NOTIFY': False, diff --git a/monocle/spawns.py b/monocle/spawns.py index 1500583dc..a8d04efd1 100644 --- a/monocle/spawns.py +++ b/monocle/spawns.py @@ -2,20 +2,27 @@ from collections import deque, OrderedDict from time import time -from itertools import chain -from hashlib import sha256 from . 
import bounds, db, sanitized as conf +from .db import DB_HASH, session_scope, Spawnpoint from .shared import get_logger from .utils import dump_pickle, load_pickle, get_current_hour, time_until_time +contains_spawn = bounds.contains_cellid if conf.SPAWN_ID_INT else bounds.contains_token -class BaseSpawns: + +class Spawns: """Manage spawn points and times""" + + __spec__ = __spec__ + __slots__ = ('known', 'despawn_times', 'unknown', 'log') + def __init__(self): ## Spawns with known times - # {(lat, lon): (spawn_id, spawn_seconds)} + # {spawn_id: spawn_seconds} self.known = OrderedDict() + + # points may not be in bounds, but are visible from within bounds # {spawn_id: despawn_seconds} self.despawn_times = {} @@ -23,8 +30,6 @@ def __init__(self): # {(lat, lon)} self.unknown = set() - self.class_version = 3 - self.db_hash = sha256(conf.DB_ENGINE.encode()).digest() self.log = get_logger('spawns') def __len__(self): @@ -33,64 +38,56 @@ def __len__(self): def __bool__(self): return len(self.despawn_times) > 0 - def update(self): - bound = bool(bounds) - last_migration = conf.LAST_MIGRATION - - with db.session_scope() as session: - query = session.query(db.Spawnpoint) - if bound or conf.STAY_WITHIN_MAP: - query = query.filter(db.Spawnpoint.lat >= bounds.south, - db.Spawnpoint.lat <= bounds.north, - db.Spawnpoint.lon >= bounds.west, - db.Spawnpoint.lon <= bounds.east) - known = {} - for spawn in query: - point = spawn.lat, spawn.lon + def items(self): + return self.known.items() + + def add_known(self, spawn_id, despawn_time): + self.despawn_times[spawn_id] = despawn_time + self.unknown.discard(spawn_id) + def update(self, _migration=conf.LAST_MIGRATION, _contains=contains_spawn): + with session_scope() as session: + query = session.query(Spawnpoint.spawn_id, Spawnpoint.despawn_time, Spawnpoint.duration, Spawnpoint.updated) + known = {} + for spawn_id, despawn_time, duration, updated in query: # skip if point is not within boundaries (if applicable) - if bound and point 
not in bounds: + if not _contains(spawn_id): continue - if not spawn.updated or spawn.updated <= last_migration: - self.unknown.add(point) + if not updated or updated < _migration: + self.unknown.add(spawn_id) continue - if spawn.duration == 60: - spawn_time = spawn.despawn_time - else: - spawn_time = (spawn.despawn_time + 1800) % 3600 + self.despawn_times[spawn_id] = despawn_time if duration == 60 else (despawn_time + 1800) % 3600 - self.despawn_times[spawn.spawn_id] = spawn.despawn_time - known[point] = spawn.spawn_id, spawn_time - self.known = OrderedDict(sorted(known.items(), key=lambda k: k[1][1])) + known[spawn_id] = spawn_time + if known: + self.known = OrderedDict(sorted(known.items(), key=lambda k: k[1])) def after_last(self): try: k = next(reversed(self.known)) - seconds = self.known[k][1] - return time() % 3600 > seconds + return time() % 3600 > self.known[k] except (StopIteration, KeyError, TypeError): return False def get_despawn_time(self, spawn_id, seen): - hour = get_current_hour(now=seen) try: - despawn_time = self.despawn_times[spawn_id] + hour - if seen > despawn_time: - despawn_time += 3600 - return despawn_time + despawn_time = self.despawn_times[spawn_id] + get_current_hour(now=seen) + return despawn_time if seen < despawn_time else despawn_time + 3600 except KeyError: return None def unpickle(self): try: state = load_pickle('spawns', raise_exception=True) - if all((state['class_version'] == self.class_version, - state['db_hash'] == self.db_hash, - state['bounds_hash'] == hash(bounds), - state['last_migration'] == conf.LAST_MIGRATION)): - self.__dict__.update(state) + if (state['class_version'] == 4, + and state['db_hash'] == DB_HASH, + and state['bounds_hash'] == hash(bounds), + and state['last_migration'] == conf.LAST_MIGRATION): + self.despawn_times = state['despawn_times'] + self.known = state['known'] + self.unknown = state['unknown'] return True else: self.log.warning('Configuration changed, reloading spawns from DB.') @@ -101,79 +98,18 
@@ def unpickle(self): return False def pickle(self): - state = self.__dict__.copy() - del state['log'] - state.pop('cells_count', None) - state['bounds_hash'] = hash(bounds) - state['last_migration'] = conf.LAST_MIGRATION - dump_pickle('spawns', state) + dump_pickle('spawns', { + 'bounds_hash': hash(bounds), + 'class_version': 4, + 'db_hash': DB_HASH, + 'despawn_times': self.despawn_times, + 'known': self.known, + 'last_migration': conf.LAST_MIGRATION, + 'unknown': self.unknown}) @property def total_length(self): - return len(self.despawn_times) + len(self.unknown) + self.cells_count - - -class Spawns(BaseSpawns): - def __init__(self): - super().__init__() - self.cells_count = 0 - - def items(self): - return self.known.items() - - def add_known(self, spawn_id, despawn_time, point): - self.despawn_times[spawn_id] = despawn_time - self.unknown.discard(point) + return len(self.despawn_times) + len(self.unknown) - def add_unknown(self, point): - self.unknown.add(point) - - def unpickle(self): - result = super().unpickle() - try: - del self.cell_points - except AttributeError: - pass - return result - - def mystery_gen(self): - for mystery in self.unknown.copy(): - yield mystery - - -class MoreSpawns(BaseSpawns): - def __init__(self): - super().__init__() - - ## Coordinates mentioned as "spawn_points" in GetMapObjects response - ## May or may not be actual spawn points, more research is needed. 
- # {(lat, lon)} - self.cell_points = set() - - def items(self): - # return a copy since it may be modified - return self.known.copy().items() - - def add_known(self, spawn_id, despawn_time, point): - self.despawn_times[spawn_id] = despawn_time - # add so that have_point() will be up to date - self.known[point] = None - self.unknown.discard(point) - self.cell_points.discard(point) - - def add_unknown(self, point): - self.unknown.add(point) - self.cell_points.discard(point) - - def have_point(self, point): - return point in chain(self.cell_points, self.known, self.unknown) - - def mystery_gen(self): - for mystery in chain(self.unknown.copy(), self.cell_points.copy()): - yield mystery - - @property - def cells_count(self): - return len(self.cell_points) -sys.modules[__name__] = MoreSpawns() if conf.MORE_POINTS else Spawns() +sys.modules[__name__] = Spawns() diff --git a/optional-requirements.txt b/optional-requirements.txt index d68f50e10..e74381236 100644 --- a/optional-requirements.txt +++ b/optional-requirements.txt @@ -3,11 +3,13 @@ peony-twitter>=0.9.3 shapely>=1.3.0 selenium>=3.0 uvloop>=0.7.0 -mysqlclient>=1.3 -psycopg2>=2.6 cchardet>=1.1.0 aiodns>=1.1.0 aiosocks>=0.2.2 sanic>=0.3 asyncpg>=0.8 ujson>=1.35 +gpsoauth>=0.4.0 +flask>=0.11.1 +psycopg2>=2.6 +mysqlclient>=1.3 diff --git a/requirements.txt b/requirements.txt index 822785e08..f35d5f9e4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,9 @@ geopy>=1.11.0 protobuf>=3.0.0 -flask>=0.11.1 -gpsoauth>=0.4.0 werkzeug>=0.11.15 sqlalchemy>=1.1.0 aiopogo>=1.8.0 polyline>=1.3.1 aiohttp>=2.0.7,<2.1 -pogeo==0.3.* -cyrandom>=0.1.2 +pogeo>=0.4.0 +cyrandom>=0.3.0 From 5321881f4451a9d4a10485fdf63ae60b087fed67 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Fri, 28 Apr 2017 17:22:13 -0600 Subject: [PATCH 02/38] WIP2 --- monocle/overseer.py | 56 ++++---- monocle/utils.py | 84 +----------- monocle/worker.py | 270 ++++++++++++++++---------------------- optional-requirements.txt | 1 + requirements.txt | 
2 - 5 files changed, 151 insertions(+), 262 deletions(-) diff --git a/monocle/overseer.py b/monocle/overseer.py index 87ef0eb76..a8e3bb1b6 100755 --- a/monocle/overseer.py +++ b/monocle/overseer.py @@ -11,15 +11,15 @@ from sqlalchemy.exc import OperationalError from .db import SIGHTING_CACHE, MYSTERY_CACHE -from .utils import get_current_hour, dump_pickle, get_start_coords, get_bootstrap_points, randomize_point, best_factors, percentage_split -from .shared import get_logger, LOOP, run_threaded, ACCOUNTS +from .utils import get_current_hour, dump_pickle, get_start_coords, best_factors, percentage_split +from .shared import ACCOUNTS, get_logger, LOOP, run_threaded from . import bounds, db_proc, spawns, sanitized as conf from .worker import Worker if conf.SPAWN_ID_INT: - from pogeo import diagonal_distance, cellid_to_location as spawnid_to_loc + from pogeo import diagonal_distance, level_edge, cellid_to_location as spawnid_to_loc else: - from pogeo import diagonal_distance, token_to_location as spawnid_to_loc + from pogeo import diagonal_distance, level_edge, token_to_location as spawnid_to_loc ANSI = '\x1b[2J\x1b[H' @@ -37,7 +37,7 @@ from os import system ANSI = '' -BAD_STATUSES = ( +BAD_STATUSES = { 'FAILED LOGIN', 'EXCEPTION', 'NOT AUTHENTICATED', @@ -57,7 +57,7 @@ 'HASHING ERROR', 'PROXY ERROR', 'TIMEOUT' -) +} class Overseer: @@ -77,6 +77,7 @@ def __init__(self, manager): self.idle_seconds = 0 self.log.info('Overseer initialized') self.pokemon_found = '' + self.jitter_lat, self.jitter_lon = def start(self, status_bar): self.captcha_queue = self.manager.captcha_queue() @@ -255,8 +256,8 @@ def _print_status(self, _ansi=ANSI, _start=datetime.now(), _notify=conf.NOTIFY): ] try: - seen = Worker.g['seen'] - captchas = Worker.g['captchas'] + seen = Worker.seen + captchas = Worker.captchas output.append('Seen per visit: {v:.2f}, per minute: {m:.0f}'.format( v=seen / self.visits, m=seen / (seconds_since_start / 60))) @@ -314,7 +315,7 @@ def longest_running(self): if 
w.start_time < earliest: worker = w earliest = w.start_time - minutes = ((time() * 1000) - earliest) / 60000 + minutes = (monotonic() - earliest) / 60.0 return worker, minutes def get_start_point(self): @@ -336,6 +337,7 @@ async def update_spawns(self, initial=False): try: await run_threaded(spawns.update) LOOP.create_task(run_threaded(spawns.pickle)) + return except OperationalError as e: self.log.exception('Operational error while trying to update spawns.') if initial: @@ -346,8 +348,6 @@ async def update_spawns(self, initial=False): except Exception as e: self.log.exception('A wild {} appeared while updating spawns!', e.__class__.__name__) await sleep(15, loop=LOOP) - else: - break async def launch(self, bootstrap, pickle): exceptions = 0 @@ -419,7 +419,7 @@ async def _launch(self, update_spawns): mystery_id = next(self.mysteries) await self.coroutine_semaphore.acquire() - LOOP.create_task(self.try_point(mystery_id)) + LOOP.create_task(self.try_spawn(mystery_id, skip_time=conf.GIVE_UP_UNKNOWN)) except StopIteration: if self.next_mystery_reload < monotonic(): self.mysteries = iter(spawns.unknown.copy()) @@ -436,7 +436,7 @@ async def _launch(self, update_spawns): continue await self.coroutine_semaphore.acquire() - LOOP.create_task(self.try_point(point, spawn_time, spawn_id)) + LOOP.create_task(self.try_spawn(spawn_id, spawn_time)) async def try_again(self, point): async with self.coroutine_semaphore: @@ -465,7 +465,7 @@ async def bootstrap(self): self.log.warning('Starting bootstrap phase 3.') unknowns = list(spawns.unknown) shuffle(unknowns) - tasks = (self.try_again(point) for point in unknowns) + tasks = (self.try_again(point) for point in map(spawnid_to_loc, unknowns)) await gather(*tasks, loop=LOOP) self.log.warning('Finished bootstrapping.') @@ -494,22 +494,25 @@ async def visit_release(worker, num, *args): async def bootstrap_two(self): async def bootstrap_try(point): async with self.coroutine_semaphore: - randomized = randomize_point(point, randomization) 
- LOOP.call_later(1790, LOOP.create_task, self.try_again(randomized)) + point.jitter(lat_amount, lon_amount) + LOOP.call_later(1790, LOOP.create_task, self.try_again(point)) worker = await self.best_worker(point, False) async with worker.busy: self.visits += await worker.bootstrap_visit(point) # randomize to within ~140m of the nearest neighbor on the second visit - randomization = conf.BOOTSTRAP_RADIUS / 155555 - 0.00045 - tasks = (bootstrap_try(x) for x in get_bootstrap_points(bounds)) + if conf.BOOTSTRAP_LEVEL < 17: + lat_amount, lon_amount = diagonal_distance(bounds.center, level_edge(conf.BOOSTRAP_LEVEL) - 140.0) + else: + lat_amount = lon_amount = 0.0 + tasks = (bootstrap_try(p) for p in bounds.get_points(bootstrap_level)) await gather(*tasks, loop=LOOP) - async def try_spawn(self, spawn_id, spawn_time=None, _jitter=diagonal_distance(bounds.center, 50.0 if conf.ENCOUNTER else 65.0)): + async def try_spawn(self, spawn_id, spawn_time=None, skip_time=conf.GIVE_UP_KNOWN, _jitter = diagonal_distance(bounds.center, 50.0 if conf.ENCOUNTER else 65.0)): try: location = spawnid_to_loc(spawn_id) - location.jitter(_jitter) - worker = await self.best_worker(location, monotonic() + (conf.GIVE_UP_KNOWN if spawn_time else conf.GIVE_UP_UNKNOWN)) + location.jitter(*_jitter) + worker = await self.best_worker(location, monotonic() + skip_time) if not worker: if spawn_time: self.skipped += 1 @@ -523,11 +526,11 @@ async def try_spawn(self, spawn_id, spawn_time=None, _jitter=diagonal_distance(b except CancelledError: raise except Exception: - self.log.exception('An exception occurred in try_point') + self.log.exception('An exception occurred in try_spawn') finally: self.coroutine_semaphore.release() - async def best_worker(self, location, skip_time, _good_enough=conf.GOOD_ENOUGH): + async def best_worker(self, location, skip_time, _enough=conf.GOOD_ENOUGH, _limit=conf.SPEED_LIMIT): while self.running: gen = (w for w in self.workers if not w.busy.locked()) try: @@ -538,11 +541,12 @@ 
async def best_worker(self, location, skip_time, _good_enough=conf.GOOD_ENOUGH): for w in gen: speed = w.travel_speed(point) if speed < lowest_speed: + if speed <= _enough: + w.speed = speed + return w lowest_speed = speed worker = w - if speed < _good_enough: - break - if lowest_speed < conf.SPEED_LIMIT: + if lowest_speed <= _limit: worker.speed = lowest_speed return worker if skip_time and monotonic() > skip_time: diff --git a/monocle/utils.py b/monocle/utils.py index a186eaa84..5cd8b0efe 100644 --- a/monocle/utils.py +++ b/monocle/utils.py @@ -3,17 +3,14 @@ from os import mkdir from os.path import join, exists from sys import platform -from asyncio import sleep -from math import sqrt from uuid import uuid4 from enum import Enum from csv import DictReader -from cyrandom import choice, shuffle, uniform +from cyrandom import choice from time import time from pickle import dump as pickle_dump, load as pickle_load, HIGHEST_PROTOCOL -from geopy import Point -from geopy.distance import distance +from pogeo import Location from aiopogo import utilities as pgoapi_utils from pogeo import get_distance @@ -61,69 +58,15 @@ def percentage_split(seq, percentages): def get_start_coords(worker_no, grid=conf.GRID, bounds=bounds): """Returns center of square for given worker""" - per_column = int((grid[0] * grid[1]) / grid[0]) + per_column = (grid[0] * grid[1]) // grid[0] column = worker_no % per_column - row = int(worker_no / per_column) + row = worker_no // per_column part_lat = (bounds.south - bounds.north) / grid[0] part_lon = (bounds.east - bounds.west) / grid[1] start_lat = bounds.north + part_lat * row + part_lat / 2 start_lon = bounds.west + part_lon * column + part_lon / 2 - return start_lat, start_lon - - -def float_range(start, end, step): - """range for floats, also capable of iterating backwards""" - if start > end: - while end <= start: - yield start - start += -step - else: - while start <= end: - yield start - start += step - - -def get_gains(dist=70): - """Returns 
lat and lon gain - - Gain is space between circles. - """ - start = Point(*bounds.center) - base = dist * sqrt(3) - height = base * sqrt(3) / 2 - dis_a = distance(meters=base) - dis_h = distance(meters=height) - lon_gain = dis_a.destination(point=start, bearing=90).longitude - lat_gain = dis_h.destination(point=start, bearing=0).latitude - return abs(start.latitude - lat_gain), abs(start.longitude - lon_gain) - - -def round_coords(point, precision, _round=round): - return _round(point[0], precision), _round(point[1], precision) - - -def get_bootstrap_points(bounds): - coords = [] - if bounds.multi: - for b in bounds.polygons: - coords.extend(get_bootstrap_points(b)) - return coords - lat_gain, lon_gain = get_gains(conf.BOOTSTRAP_RADIUS) - west, east = bounds.west, bounds.east - bound = bool(bounds) - for map_row, lat in enumerate( - float_range(bounds.south, bounds.north, lat_gain) - ): - row_start_lon = west - if map_row % 2 != 0: - row_start_lon -= 0.5 * lon_gain - for lon in float_range(row_start_lon, east, lon_gain): - point = lat, lon - if not bound or point in bounds: - coords.append(point) - shuffle(coords) - return coords + return Location(start_lat, start_lon) def get_device_info(account): @@ -232,14 +175,6 @@ def accounts_from_csv(new_accounts, pickled_accounts): return accounts -if conf.SPAWN_ID_INT: - def get_spawn_id(pokemon, _int=int): - return _int(pokemon['spawn_point_id'], 16) -else: - def get_spawn_id(pokemon): - return pokemon['spawn_point_id'] - - def get_current_hour(now=None, _time=time): now = now or _time() return round(now - (now % 3600)) @@ -320,12 +255,3 @@ def load_accounts_csv(): for row in reader: accounts[row['username']] = dict(row) return accounts - - -def randomize_point(point, amount=0.0003, randomize=uniform): - '''Randomize point, by up to ~47 meters by default.''' - lat, lon = point - return ( - randomize(lat - amount, lat + amount), - randomize(lon - amount, lon + amount) - ) diff --git a/monocle/worker.py b/monocle/worker.py 
index 35643c2b1..f3cf458b1 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -8,25 +8,16 @@ from aiopogo import PGoApi, HashServer, json_loads, exceptions as ex from aiopogo.auth_ptc import AuthPtc -from pogeo import get_distance +from pogeo import get_distance, get_cell_ids from .db import SIGHTING_CACHE, MYSTERY_CACHE -from .utils import round_coords, load_pickle, get_device_info, get_spawn_id, get_start_coords, Units, randomize_point +from .utils import load_pickle, get_device_info, get_start_coords, Units from .shared import get_logger, LOOP, SessionManager, run_threaded, ACCOUNTS from . import altitudes, avatar, bounds, db_proc, spawns, sanitized as conf if conf.NOTIFY: from .notification import Notifier -if conf.CACHE_CELLS: - from array import typecodes - if 'Q' in typecodes: - from pogeo import get_cell_ids_compact as _pogeo_cell_ids - else: - from pogeo import get_cell_ids as _pogeo_cell_ids -else: - from pogeo import get_cell_ids as _pogeo_cell_ids - _unit = getattr(Units, conf.SPEED_UNIT.lower()) if conf.SPIN_POKESTOPS: @@ -48,14 +39,14 @@ class Worker: download_hash = "7b9c5056799a2c5c7d48a62c497736cbcf8c4acb" scan_delay = conf.SCAN_DELAY if conf.SCAN_DELAY >= 10 else 10 - g = {'seen': 0, 'captchas': 0} + seen = 0 + captchas = 0 if conf.CACHE_CELLS: cells = load_pickle('cells') or {} @classmethod def get_cell_ids(cls, point): - rounded = round_coords(point, 4) try: return cls.cells[rounded] except KeyError: @@ -92,10 +83,9 @@ def __init__(self, worker_no): raise ValueError("You don't have enough accounts for the number of workers specified in GRID.") from e self.username = self.account['username'] try: - self.location = self.account['location'][:2] - except KeyError: + self.location = self.account['loc'] + except (KeyError, TypeError): self.location = get_start_coords(worker_no) - self.altitude = None self.inventory_timestamp = self.account.get('inventory_timestamp') # last time of any request self.last_request = self.account.get('time', 0) @@ 
-121,6 +111,7 @@ def __init__(self, worker_no): self.pokestops = conf.SPIN_POKESTOPS self.next_spin = 0 self.handle = HandleStub() + self.start_time = monotonic() def initialize_api(self): device_info = get_device_info(self.account) @@ -402,26 +393,27 @@ async def complete_tutorial(self, tutorial_state, asset_ids): def update_inventory(self, inventory_items): for thing in inventory_items: - obj = thing.get('inventory_item_data', {}) - if 'item' in obj: - item = obj['item'] - item_id = item.get('item_id') - self.items[item_id] = item.get('count', 0) - elif conf.INCUBATE_EGGS: - if ('pokemon_data' in obj and - obj['pokemon_data'].get('is_egg')): - egg = obj['pokemon_data'] - egg_id = egg.get('id') - self.eggs[egg_id] = egg - elif 'egg_incubators' in obj: - self.unused_incubators = [] - for item in obj['egg_incubators'].get('egg_incubator',[]): - if 'pokemon_id' in item: - continue - if item.get('item_id') == 901: - self.unused_incubators.append(item) - else: - self.unused_incubators.insert(0, item) + try: + item_data = thing['inventory_item_data'] + if 'item' in item_data: + item = item_data['item'] + self.items[item['item_id']] = item.get('count', 0) + elif conf.INCUBATE_EGGS: + if ('pokemon_data' in item_data and + item_data['pokemon_data'].get('is_egg')): + egg = item_data['pokemon_data'] + self.eggs[egg['id']] = egg + elif 'egg_incubators' in item_data: + self.unused_incubators = [] + for item in item_data['egg_incubators']['egg_incubator']: + if 'pokemon_id' in item: + continue + if item.get('item_id') == 901: + self.unused_incubators.append(item) + else: + self.unused_incubators.insert(0, item) + except KeyError: + continue async def call(self, request, chain=True, stamp=True, buddy=True, settings=False, dl_hash=True, action=None): if chain: @@ -459,10 +451,10 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False if chain: raise ex.MalformedResponseException('no responses') else: - self.last_request = time() + 
self.location.update_time() return response else: - self.last_request = time() + self.location.update_time() err = None break except (ex.NotLoggedInException, ex.AuthException) as e: @@ -506,7 +498,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False except ex.BadRPCException: raise except ex.InvalidRPCException as e: - self.last_request = time() + self.location.update_time() if not isinstance(e, type(err)): err = e self.log.warning('{}', e) @@ -525,7 +517,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False self.log.error('{}', e) await sleep(5, loop=LOOP) except (ex.MalformedResponseException, ex.UnexpectedResponseException) as e: - self.last_request = time() + self.location.update_time() if not isinstance(e, type(err)): self.log.warning('{}', e) self.error_code = 'MALFORMED RESPONSE' @@ -535,18 +527,14 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False if action: # pad for time that action would require - self.last_action = self.last_request + action + self.last_action = self.location.time + action try: delta = responses['GET_INVENTORY']['inventory_delta'] self.inventory_timestamp = delta['new_timestamp_ms'] + self.update_inventory(delta['inventory_items']) except KeyError: pass - else: - try: - self.update_inventory(delta['inventory_items']) - except KeyError: - pass if settings: try: dl_settings = responses['DOWNLOAD_SETTINGS'] @@ -568,7 +556,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False pass if self.check_captcha(responses): self.log.warning('{} has encountered a CAPTCHA, trying to solve', self.username) - self.g['captchas'] += 1 + Worker.captchas += 1 await self.handle_captcha(responses) return responses @@ -585,30 +573,28 @@ async def bootstrap_visit(self, point): if await self.visit(point, bootstrap=True): return True self.error_code = '∞' - self.simulate_jitter(0.00005) + self.location.jitter(0.00005, 0.00005, 0.5) 
return False - async def visit(self, point, spawn_id=None, bootstrap=False): - """Wrapper for self.visit_point - runs it a few times before giving up - - Also is capable of restarting in case an error occurs. + async def visit_point(self, point, spawn_id=None, bootstrap=False): + """Wrapper for visit that sets location and logs in if necessary """ try: try: - self.altitude = altitudes.get(point) + point.altitude = altitudes.get(point) except KeyError: - self.altitude = await altitudes.fetch(point) + point.altitude = await altitudes.fetch(point) self.location = point - self.api.set_position(*self.location, self.altitude) + self.api.location = self.location if not self.authenticated: await self.login() - return await self.visit_point(point, spawn_id, bootstrap) + return await self.visit(point, spawn_id, bootstrap) except ex.NotLoggedInException: self.error_code = 'NOT AUTHENTICATED' await sleep(1, loop=LOOP) if not await self.login(reauth=True): await self.swap_account(reason='reauth failed') - return await self.visit(point, spawn_id, bootstrap) + return await self.visit(self.location, spawn_id, bootstrap) except ex.AuthException as e: self.log.warning('Auth error on {}: {}', self.username, e) self.error_code = 'NOT AUTHENTICATED' @@ -616,7 +602,7 @@ async def visit(self, point, spawn_id=None, bootstrap=False): await self.swap_account(reason='login failed') except CaptchaException: self.error_code = 'CAPTCHA' - self.g['captchas'] += 1 + Worker.captchas += 1 await sleep(1, loop=LOOP) await self.bench_account() except CaptchaSolveException: @@ -686,37 +672,34 @@ async def visit(self, point, spawn_id=None, bootstrap=False): self.error_code = 'EXCEPTION' return False - async def visit_point(self, point, spawn_id, bootstrap): + async def visit(self, spawn_id, bootstrap, encounter_conf=conf.ENCOUNTER, notify_conf=conf.NOTIFY): self.handle.cancel() self.error_code = '∞' if bootstrap else '!' 
- self.log.info('Visiting {0[0]:.4f},{0[1]:.4f}', point) - start = time() + self.log.info('Visiting {0[0]:.4f}, {0[1]:.4f}', self.location) - cell_ids = self.get_cell_ids(point) - since_timestamp_ms = (0,) * len(cell_ids) + cell_ids = get_cell_ids(self.location) request = self.api.create_request() request.get_map_objects(cell_id=cell_ids, - since_timestamp_ms=since_timestamp_ms, - latitude=point[0], - longitude=point[1]) + since_timestamp_ms=(0,) * len(cell_ids), + latitude=self.location[0], + longitude=self.location[1]) diff = self.last_gmo + self.scan_delay - time() if diff > 0: await sleep(diff, loop=LOOP) responses = await self.call(request) - self.last_gmo = self.last_request + self.last_gmo = self.location.time try: map_objects = responses['GET_MAP_OBJECTS'] map_status = map_objects['status'] if map_status != 1: - error = 'GetMapObjects code for {}. Speed: {:.2f}'.format(self.username, self.speed) + error = 'GetMapObjects code {} for {}. Speed: {:.2f}'.format(map_status, self.username, self.speed) self.empty_visits += 1 if self.empty_visits > 3: - reason = '{} empty visits'.format(self.empty_visits) - await self.swap_account(reason) + await self.swap_account('{} empty visits'.format(self.empty_visits)) raise ex.UnexpectedResponseException(error) except KeyError: await self.random_sleep(.5, 1) @@ -725,7 +708,6 @@ async def visit_point(self, point, spawn_id, bootstrap): pokemon_seen = 0 forts_seen = 0 - points_seen = 0 seen_target = not spawn_id try: @@ -737,9 +719,6 @@ async def visit_point(self, point, spawn_id, bootstrap): if conf.ITEM_LIMITS and self.bag_full(): await self.clean_bag() - encounter_conf = conf.ENCOUNTER - notify_conf = conf.NOTIFY - more_points = conf.MORE_POINTS for map_cell in map_objects['map_cells']: request_time_ms = map_cell['current_timestamp_ms'] for pokemon in map_cell.get('wild_pokemons', ()): @@ -786,7 +765,7 @@ async def visit_point(self, point, spawn_id, bootstrap): pokestop = self.normalize_pokestop(fort) db_proc.add(pokestop) 
if (self.pokestops and not self.bag_full() - and time() > self.next_spin + and monotonic() > self.next_spin and (not conf.SMART_THROTTLE or self.smart_throttle(2))): cooldown = fort.get('cooldown_complete_timestamp_ms') @@ -795,17 +774,6 @@ async def visit_point(self, point, spawn_id, bootstrap): else: db_proc.add(self.normalize_gym(fort)) - if more_points: - try: - for p in map_cell['spawn_points']: - points_seen += 1 - p = p['latitude'], p['longitude'] - if spawns.have_point(p) or p not in bounds: - continue - spawns.cell_points.add(p) - except KeyError: - pass - if spawn_id: db_proc.add({ 'type': 'target', @@ -819,7 +787,7 @@ async def visit_point(self, point, spawn_id, bootstrap): if pokemon_seen > 0: self.error_code = ':' self.total_seen += pokemon_seen - self.g['seen'] += pokemon_seen + Worker.seen += pokemon_seen self.empty_visits = 0 else: self.empty_visits += 1 @@ -835,17 +803,16 @@ async def visit_point(self, point, spawn_id, bootstrap): if conf.MAP_WORKERS: self.worker_dict.update([(self.worker_no, - (point, start, self.speed, self.total_seen, + (point, self.location.time, self.speed, self.total_seen, self.visits, pokemon_seen))]) self.log.info( 'Point processed, {} Pokemon and {} forts seen!', pokemon_seen, - forts_seen, - ) + forts_seen) self.update_accounts_dict() self.handle = LOOP.call_later(60, self.unset_code) - return pokemon_seen + forts_seen + points_seen + return pokemon_seen + forts_seen def smart_throttle(self, requests=1): try: @@ -861,70 +828,68 @@ def smart_throttle(self, requests=1): async def spin_pokestop(self, pokestop): self.error_code = '$' - pokestop_location = pokestop['lat'], pokestop['lon'] - distance = get_distance(self.location, pokestop_location) - # permitted interaction distance - 4 (for some jitter leeway) + distance = self.location.distance_meters(Location(pokestop['lat'], pokestop['lon'])) + # permitted interaction distance - 2 (for some jitter leeway) # estimation of spinning speed limit - if distance > 36 or self.speed 
> SPINNING_SPEED_LIMIT: + if distance > 38 or self.speed > SPINNING_SPEED_LIMIT: self.error_code = '!' return False # randomize location up to ~1.5 meters - self.simulate_jitter(amount=0.00001) + self.location.jitter(0.00001, 0.00001, 0.25) request = self.api.create_request() - request.fort_details(fort_id = pokestop['external_id'], - latitude = pokestop['lat'], - longitude = pokestop['lon']) + request.fort_details(fort_id=pokestop['external_id'], + latitude=pokestop['lat'], + longitude=pokestop['lon']) responses = await self.call(request, action=1.2) name = responses.get('FORT_DETAILS', {}).get('name') request = self.api.create_request() - request.fort_search(fort_id = pokestop['external_id'], - player_latitude = self.location[0], - player_longitude = self.location[1], - fort_latitude = pokestop['lat'], - fort_longitude = pokestop['lon']) + request.fort_search(fort_id=pokestop['external_id'], + player_latitude=self.location[0], + player_longitude=self.location[1], + fort_latitude=pokestop['lat'], + fort_longitude=pokestop['lon']) responses = await self.call(request, action=2) - result = responses.get('FORT_SEARCH', {}).get('result', 0) - if result == 1: - self.log.info('Spun {}.', name) - elif result == 2: - self.log.info('The server said {} was out of spinning range. {:.1f}m {:.1f}{}', - name, distance, self.speed, UNIT_STRING) - elif result == 3: - self.log.warning('{} was in the cooldown period.', name) - elif result == 4: - self.log.warning('Could not spin {} because inventory was full. {}', - name, sum(self.items.values())) - elif result == 5: - self.log.warning('Could not spin {} because the daily limit was reached.', name) - self.pokestops = False - else: + try: + result = responses['FORT_SEARCH']['result'] + if result == 1: + self.log.info('Spun {}.', name) + elif result == 2: + self.log.info('The server said {} was out of spinning range. 
{:.1f}m {:.1f}{}', + name, distance, self.speed, UNIT_STRING) + elif result == 3: + self.log.warning('{} was in the cooldown period.', name) + elif result == 4: + self.log.warning('Could not spin {} because inventory was full. {}', + name, sum(self.items.values())) + elif result == 5: + self.log.warning('Could not spin {} because the daily limit was reached.', name) + self.pokestops = False + else: + self.log.error('Unknown Pokestop spinning response code: {}', result) + except KeyError: self.log.warning('Failed spinning {}: {}', name, result) - self.next_spin = time() + conf.SPIN_COOLDOWN + self.next_spin = monotonic() + conf.SPIN_COOLDOWN self.error_code = '!' return responses async def encounter(self, pokemon, spawn_id): - distance_to_pokemon = get_distance(self.location, (pokemon['lat'], pokemon['lon'])) + distance_to_pokemon = self.location.distance_meters(Location(pokemon['lat'], pokemon['lon'])) self.error_code = '~' if distance_to_pokemon > 48: percent = 1 - (47 / distance_to_pokemon) - lat_change = (self.location[0] - pokemon['lat']) * percent - lon_change = (self.location[1] - pokemon['lon']) * percent - self.location = ( - self.location[0] - lat_change, - self.location[1] - lon_change) - self.altitude = uniform(self.altitude - 2, self.altitude + 2) - self.api.set_position(*self.location, self.altitude) + self.location[0] -= (self.location[0] - pokemon['lat']) * percent + self.location[1] -= (self.location[1] - pokemon['lon']) * percent + self.location.jitter(0.000001, 0.000001, 1) delay_required = min((distance_to_pokemon * percent) / 8, 1.1) else: - self.simulate_jitter() + self.location.jitter(0.00001, 0.00001, .3) delay_required = 1.1 await self.random_sleep(delay_required, delay_required + 1.5) @@ -959,12 +924,12 @@ async def clean_bag(self): rec_items = {} limits = conf.ITEM_LIMITS for item, count in self.items.items(): - if item in limits and count > limits[item]: + try: discard = count - limits[item] - if discard > 50: - rec_items[item] = randint(50, discard) - 
else: - rec_items[item] = discard + if discard > 0: + rec_items[item] = discard if discard <= 50 else randint(50, discard) + except KeyError: + pass removed = 0 for item, count in rec_items.items(): @@ -972,10 +937,14 @@ async def clean_bag(self): request.recycle_inventory_item(item_id=item, count=count) responses = await self.call(request, action=2) - if responses.get('RECYCLE_INVENTORY_ITEM', {}).get('result', 0) != 1: + try: + result = responses['RECYCLE_INVENTORY_ITEM']['result'] + if result == 1: + removed += count + else: + self.log.warning("Failed to remove item {}, code: {}", item, result) + except KeyError: self.log.warning("Failed to remove item {}", item) - else: - removed += count self.log.info("Removed {} items", removed) self.error_code = '!' @@ -1016,11 +985,13 @@ async def handle_captcha(self, responses): 'key': conf.CAPTCHA_KEY, 'method': 'userrecaptcha', 'googlekey': '6LeeTScTAAAAADqvhqVMhPpr_vB9D364Ia-1dSgK', - 'pageurl': responses.get('CHECK_CHALLENGE', {}).get('challenge_url'), + 'pageurl': responses['CHECK_CHALLENGE']['challenge_url'], 'json': 1 } async with session.post('http://2captcha.com/in.php', params=params) as resp: response = await resp.json(loads=json_loads) + except KeyError: + self.log.error('Challenge URL not found in response.') except CancelledError: raise except Exception as e: @@ -1075,16 +1046,10 @@ async def handle_captcha(self, responses): # try again await self.handle_captcha(responses) - def simulate_jitter(self, amount=0.00002): - '''Slightly randomize location, by up to ~3 meters by default.''' - self.location = randomize_point(self.location) - self.altitude = uniform(self.altitude - 1, self.altitude + 1) - self.api.set_position(*self.location, self.altitude) - def update_accounts_dict(self, captcha=False, banned=False): self.account['captcha'] = captcha self.account['banned'] = banned - self.account['location'] = self.location + self.account['loc'] = self.location self.account['time'] = self.last_request 
self.account['inventory_timestamp'] = self.inventory_timestamp self.account['items'] = self.items @@ -1144,26 +1109,25 @@ async def new_account(self): self.account = await run_threaded(self.extra_queue.get) self.username = self.account['username'] try: - self.location = self.account['location'][:2] + self.location = self.account['loc'] except KeyError: self.location = get_start_coords(self.worker_no) self.inventory_timestamp = self.account.get('inventory_timestamp') self.player_level = self.account.get('level') - self.last_request = self.account.get('time', 0) - self.last_action = self.last_request - self.last_gmo = self.last_request + self.last_action = self.last_gmo = self.location.time self.items = self.account.get('items', {}) self.num_captchas = 0 self.eggs = {} self.unused_incubators = [] self.initialize_api() self.error_code = None + self.start_time = monotonic() def unset_code(self): self.error_code = None @staticmethod - def normalize_pokemon(raw): + def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): """Normalizes data coming from API into something acceptable by db""" tsm = raw['last_modified_timestamp_ms'] tss = round(tsm / 1000) @@ -1174,7 +1138,7 @@ def normalize_pokemon(raw): 'pokemon_id': raw['pokemon_data']['pokemon_id'], 'lat': raw['latitude'], 'lon': raw['longitude'], - 'spawn_id': get_spawn_id(raw), + 'spawn_id': int(raw['spawn_point_id'], 16) if spawn_int else raw['spawn_point_id'], 'seen': tss } if tth > 0 and tth <= 90000: @@ -1245,10 +1209,6 @@ async def random_sleep(minimum=10.1, maximum=14, loop=LOOP): """Sleeps for a bit""" await sleep(uniform(minimum, maximum), loop=loop) - @property - def start_time(self): - return self.api.start_time - @property def status(self): """Returns status message to be displayed in status screen""" diff --git a/optional-requirements.txt b/optional-requirements.txt index e74381236..209e305f9 100644 --- a/optional-requirements.txt +++ b/optional-requirements.txt @@ -11,5 +11,6 @@ asyncpg>=0.8 ujson>=1.35 
gpsoauth>=0.4.0 flask>=0.11.1 +geopy>=1.11.0 psycopg2>=2.6 mysqlclient>=1.3 diff --git a/requirements.txt b/requirements.txt index f35d5f9e4..215a8376b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,4 @@ -geopy>=1.11.0 protobuf>=3.0.0 -werkzeug>=0.11.15 sqlalchemy>=1.1.0 aiopogo>=1.8.0 polyline>=1.3.1 From b7d0ad16ab69178767da35904aca15a80e8a0157 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Sun, 30 Apr 2017 05:02:06 -0600 Subject: [PATCH 03/38] WIP3 --- config.example.py | 2 +- monocle/overseer.py | 51 +++++++++++++++++++++++------------ monocle/sanitized.py | 4 +-- monocle/utils.py | 51 +++++++++++++---------------------- monocle/web_utils.py | 21 ++++----------- monocle/worker.py | 64 +++++++++++--------------------------------- scan.py | 3 ++- 7 files changed, 79 insertions(+), 117 deletions(-) diff --git a/config.example.py b/config.example.py index 2d7ffffb7..04450720e 100644 --- a/config.example.py +++ b/config.example.py @@ -58,7 +58,7 @@ # Immediately select workers whose speed are below (SPEED_UNIT)p/h instead of # continuing to try to find the worker with the lowest speed. # May increase clustering if you have a high density of workers. -GOOD_ENOUGH = 0.1 +GOOD_ENOUGH = 1.0 # Seconds to sleep after failing to find an eligible worker before trying again. 
SEARCH_SLEEP = 2.5 diff --git a/monocle/overseer.py b/monocle/overseer.py index a8e3bb1b6..b84577255 100755 --- a/monocle/overseer.py +++ b/monocle/overseer.py @@ -59,6 +59,23 @@ 'TIMEOUT' } +_unit = conf.SPEED_UNIT.lower() +if _unit == "miles": + # miles/hour to meters/second, default to 19.5mph + SPEED_LIMIT = conf.SPEED_LIMIT * 0.44704 if conf.SPEED_LIMIT else 8.71728 + GOOD_ENOUGH = conf.GOOD_ENOUGH * 0.44704 if conf.GOOD_ENOUGH else 0.44704 +elif _unit == "kilometers": + # kilometers/hour to meters/second, default to 31.38km/h + SPEED_LIMIT = conf.SPEED_LIMIT * 1000 / 3600 if conf.SPEED_LIMIT else 8.71728 + GOOD_ENOUGH = conf.GOOD_ENOUGH * 1000 / 3600 if conf.GOOD_ENOUGH else 0.44704 +elif _unit == "meters": + # meters/hour to meters/second + SPEED_LIMIT = conf.SPEED_LIMIT / 3600 if conf.SPEED_LIMIT else 8.71728 + GOOD_ENOUGH = conf.GOOD_ENOUGH / 3600 if conf.GOOD_ENOUGH else 0.44704 +else: + raise ValueError("Valid speed units are: 'miles', 'kilometers', and 'meters'") +del _unit + class Overseer: def __init__(self, manager): @@ -77,7 +94,6 @@ def __init__(self, manager): self.idle_seconds = 0 self.log.info('Overseer initialized') self.pokemon_found = '' - self.jitter_lat, self.jitter_lon = def start(self, status_bar): self.captcha_queue = self.manager.captcha_queue() @@ -530,25 +546,27 @@ async def try_spawn(self, spawn_id, spawn_time=None, skip_time=conf.GIVE_UP_KNOW finally: self.coroutine_semaphore.release() - async def best_worker(self, location, skip_time, _enough=conf.GOOD_ENOUGH, _limit=conf.SPEED_LIMIT): + async def best_worker(self, location, skip_time, _enough=GOOD_ENOUGH, _limit=SPEED_LIMIT): while self.running: gen = (w for w in self.workers if not w.busy.locked()) try: worker = next(gen) - lowest_speed = worker.travel_speed(point) + current_time = time() + lowest_speed = worker.location.speed_with_time(location, current_time) except StopIteration: - lowest_speed = float('inf') - for w in gen: - speed = w.travel_speed(point) - if speed < 
lowest_speed: - if speed <= _enough: - w.speed = speed - return w - lowest_speed = speed - worker = w - if lowest_speed <= _limit: - worker.speed = lowest_speed - return worker + pass + else: + for w in gen: + speed = w.location.speed_with_time(location, current_time) + if speed < lowest_speed: + if speed <= _enough: + w.speed = speed + return w + lowest_speed = speed + worker = w + if lowest_speed <= _limit: + worker.speed = lowest_speed + return worker if skip_time and monotonic() > skip_time: return None await sleep(conf.SEARCH_SLEEP, loop=LOOP) @@ -556,5 +574,4 @@ async def best_worker(self, location, skip_time, _enough=conf.GOOD_ENOUGH, _limi def refresh_dict(self): while not self.extra_queue.empty(): account = self.extra_queue.get() - username = account['username'] - ACCOUNTS[username] = account + ACCOUNTS[account['username']] = account diff --git a/monocle/sanitized.py b/monocle/sanitized.py index 75a5a94a1..c4603818f 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -160,7 +160,7 @@ 'FULL_TIME': 1800, 'GIVE_UP_KNOWN': 75, 'GIVE_UP_UNKNOWN': 60, - 'GOOD_ENOUGH': 0.1, + 'GOOD_ENOUGH': None, 'GOOGLE_MAPS_KEY': '', 'HASHTAGS': None, 'HOLES': None, @@ -213,7 +213,7 @@ 'SKIP_SPAWN': 90, 'SMART_THROTTLE': False, 'SPAWN_ID_INT': True, - 'SPEED_LIMIT': 19.5, + 'SPEED_LIMIT': None, 'SPEED_UNIT': 'miles', 'SPIN_COOLDOWN': 300, 'SPIN_POKESTOPS': True, diff --git a/monocle/utils.py b/monocle/utils.py index 5cd8b0efe..57ac96759 100644 --- a/monocle/utils.py +++ b/monocle/utils.py @@ -4,7 +4,6 @@ from os.path import join, exists from sys import platform from uuid import uuid4 -from enum import Enum from csv import DictReader from cyrandom import choice from time import time @@ -17,29 +16,6 @@ from . 
import bounds, sanitized as conf -IPHONES = {'iPhone5,1': 'N41AP', - 'iPhone5,2': 'N42AP', - 'iPhone5,3': 'N48AP', - 'iPhone5,4': 'N49AP', - 'iPhone6,1': 'N51AP', - 'iPhone6,2': 'N53AP', - 'iPhone7,1': 'N56AP', - 'iPhone7,2': 'N61AP', - 'iPhone8,1': 'N71AP', - 'iPhone8,2': 'N66AP', - 'iPhone8,4': 'N69AP', - 'iPhone9,1': 'D10AP', - 'iPhone9,2': 'D11AP', - 'iPhone9,3': 'D101AP', - 'iPhone9,4': 'D111AP'} - - -class Units(Enum): - miles = 1 - kilometers = 2 - meters = 3 - - def best_factors(n): return next(((i, n//i) for i in range(int(n**0.5), 0, -1) if n % i == 0)) @@ -74,12 +50,24 @@ def get_device_info(account): 'device': 'iPhone', 'manufacturer': 'Apple'} try: - if account['iOS'].startswith('1'): - device_info['product'] = 'iOS' - else: - device_info['product'] = 'iPhone OS' + device_info['product'] = 'iOS' if account['iOS'].startswith('1') else 'iPhone OS' device_info['hardware'] = account['model'] + '\x00' - device_info['model'] = IPHONES[account['model']] + '\x00' + iphones = {'iPhone5,1': 'N41AP', + 'iPhone5,2': 'N42AP', + 'iPhone5,3': 'N48AP', + 'iPhone5,4': 'N49AP', + 'iPhone6,1': 'N51AP', + 'iPhone6,2': 'N53AP', + 'iPhone7,1': 'N56AP', + 'iPhone7,2': 'N61AP', + 'iPhone8,1': 'N71AP', + 'iPhone8,2': 'N66AP', + 'iPhone8,4': 'N69AP', + 'iPhone9,1': 'D10AP', + 'iPhone9,2': 'D11AP', + 'iPhone9,3': 'D101AP', + 'iPhone9,4': 'D111AP'} + device_info['model'] = iphones[account['model']] + '\x00' except (KeyError, AttributeError): account = generate_device_info(account) return get_device_info(account) @@ -91,10 +79,9 @@ def get_device_info(account): def generate_device_info(account): ios8 = ('8.0', '8.0.1', '8.0.2', '8.1', '8.1.1', '8.1.2', '8.1.3', '8.2', '8.3', '8.4', '8.4.1') ios9 = ('9.0', '9.0.1', '9.0.2', '9.1', '9.2', '9.2.1', '9.3', '9.3.1', '9.3.2', '9.3.3', '9.3.4', '9.3.5') - ios10 = ('10.0', '10.0.1', '10.0.2', '10.0.3', '10.1', '10.1.1', '10.2', '10.2.1') + ios10 = ('10.0', '10.0.1', '10.0.2', '10.0.3', '10.1', '10.1.1', '10.2', '10.2.1', '10.3', '10.3.1') 
- devices = tuple(IPHONES.keys()) - account['model'] = choice(devices) + account['model'] = choice(('iPhone5,1', 'iPhone5,2', 'iPhone5,3', 'iPhone5,4', 'iPhone6,1', 'iPhone6,2', 'iPhone7,1', 'iPhone7,2', 'iPhone8,1', 'iPhone8,2', 'iPhone8,4', 'iPhone9,1', 'iPhone9,2', 'iPhone9,3', 'iPhone9,4')) account['id'] = uuid4().hex diff --git a/monocle/web_utils.py b/monocle/web_utils.py index 684b548d2..e38f1a179 100644 --- a/monocle/web_utils.py +++ b/monocle/web_utils.py @@ -5,20 +5,9 @@ from monocle import sanitized as conf from monocle.db import get_forts, Pokestop, session_scope, Sighting, Spawnpoint -from monocle.utils import Units, get_address +from monocle.utils import get_address from monocle.names import DAMAGE, MOVES, POKEMON -if conf.MAP_WORKERS: - try: - UNIT = getattr(Units, conf.SPEED_UNIT.lower()) - if UNIT is Units.miles: - UNIT_STRING = "MPH" - elif UNIT is Units.kilometers: - UNIT_STRING = "KMH" - elif UNIT is Units.meters: - UNIT_STRING = "m/h" - except AttributeError: - UNIT_STRING = "MPH" def get_args(): parser = ArgumentParser() @@ -73,15 +62,15 @@ def data(self): def get_worker_markers(workers): return [{ - 'lat': lat, - 'lon': lon, + 'lat': location[0], + 'lon': location[1], 'worker_no': worker_no, 'time': datetime.fromtimestamp(timestamp).strftime('%I:%M:%S %p'), - 'speed': '{:.1f}{}'.format(speed, UNIT_STRING), + 'speed': '{:.1f}m/s'.format(speed), 'total_seen': total_seen, 'visits': visits, 'seen_here': seen_here - } for worker_no, ((lat, lon), timestamp, speed, total_seen, visits, seen_here) in workers.data] + } for worker_no, (location, timestamp, speed, total_seen, visits, seen_here) in workers.data] def sighting_to_marker(pokemon, names=POKEMON, moves=MOVES, damage=DAMAGE): diff --git a/monocle/worker.py b/monocle/worker.py index f3cf458b1..bcbbfc5ec 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -8,30 +8,21 @@ from aiopogo import PGoApi, HashServer, json_loads, exceptions as ex from aiopogo.auth_ptc import AuthPtc -from pogeo 
import get_distance, get_cell_ids from .db import SIGHTING_CACHE, MYSTERY_CACHE -from .utils import load_pickle, get_device_info, get_start_coords, Units +from .utils import load_pickle, get_device_info, get_start_coords from .shared import get_logger, LOOP, SessionManager, run_threaded, ACCOUNTS from . import altitudes, avatar, bounds, db_proc, spawns, sanitized as conf if conf.NOTIFY: from .notification import Notifier - -_unit = getattr(Units, conf.SPEED_UNIT.lower()) -if conf.SPIN_POKESTOPS: - if _unit is Units.miles: - SPINNING_SPEED_LIMIT = 21 - UNIT_STRING = "MPH" - elif _unit is Units.kilometers: - SPINNING_SPEED_LIMIT = 34 - UNIT_STRING = "KMH" - elif _unit is Units.meters: - SPINNING_SPEED_LIMIT = 34000 - UNIT_STRING = "m/h" -UNIT = _unit.value -del _unit +if conf.CACHE_CELLS: + from pogeo import CellCache + CELL_CACHE = load_pickle('cellcache') or CellCache() + get_cell_ids = CELL_CACHE.get_cell_ids +else: + from pogeo import get_cell_ids class Worker: @@ -42,20 +33,6 @@ class Worker: seen = 0 captchas = 0 - if conf.CACHE_CELLS: - cells = load_pickle('cells') or {} - - @classmethod - def get_cell_ids(cls, point): - try: - return cls.cells[rounded] - except KeyError: - cells = _pogeo_cell_ids(rounded) - cls.cells[rounded] = cells - return cells - else: - get_cell_ids = _pogeo_cell_ids - login_semaphore = Semaphore(conf.SIMULTANEOUS_LOGINS, loop=LOOP) sim_semaphore = Semaphore(conf.SIMULTANEOUS_SIMULATION, loop=LOOP) @@ -560,14 +537,6 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False await self.handle_captcha(responses) return responses - def travel_speed(self, point): - '''Fast calculation of travel speed to point''' - time_diff = max(time() - self.last_request, self.scan_delay) - distance = get_distance(self.location, point, UNIT) - # conversion from seconds to hours - speed = (distance / time_diff) * 3600 - return speed - async def bootstrap_visit(self, point): for _ in range(3): if await self.visit(point, 
bootstrap=True): @@ -658,7 +627,7 @@ async def visit_point(self, point, spawn_id=None, bootstrap=False): self.error_code = 'MALFORMED RESPONSE' except EmptyGMOException as e: self.error_code = '0' - self.log.warning('Empty GetMapObjects response for {}. Speed: {:.2f}', self.username, self.speed) + self.log.warning('Empty GetMapObjects response for {}. Speed: {:.2f}m/s', self.username, self.speed) except ex.HashServerException as e: self.log.warning('{}', e) self.error_code = 'HASHING ERROR' @@ -696,7 +665,7 @@ async def visit(self, spawn_id, bootstrap, encounter_conf=conf.ENCOUNTER, notify map_status = map_objects['status'] if map_status != 1: - error = 'GetMapObjects code {} for {}. Speed: {:.2f}'.format(map_status, self.username, self.speed) + error = 'GetMapObjects code {} for {}. Speed: {:.2f}m/s'.format(map_status, self.username, self.speed) self.empty_visits += 1 if self.empty_visits > 3: await self.swap_account('{} empty visits'.format(self.empty_visits)) @@ -792,7 +761,7 @@ async def visit(self, spawn_id, bootstrap, encounter_conf=conf.ENCOUNTER, notify else: self.empty_visits += 1 if forts_seen == 0: - self.log.warning('Nothing seen by {}. Speed: {:.2f}', self.username, self.speed) + self.log.warning('Nothing seen by {}. 
Speed: {:.2f}m/s', self.username, self.speed) self.error_code = '0 SEEN' else: self.error_code = ',' @@ -802,9 +771,8 @@ async def visit(self, spawn_id, bootstrap, encounter_conf=conf.ENCOUNTER, notify self.visits += 1 if conf.MAP_WORKERS: - self.worker_dict.update([(self.worker_no, - (point, self.location.time, self.speed, self.total_seen, - self.visits, pokemon_seen))]) + self.worker_dict[self.worker_no] = (point, self.location.time, + self.speed, self.total_seen, self.visits, pokemon_seen) self.log.info( 'Point processed, {} Pokemon and {} forts seen!', pokemon_seen, @@ -829,9 +797,9 @@ def smart_throttle(self, requests=1): async def spin_pokestop(self, pokestop): self.error_code = '$' distance = self.location.distance_meters(Location(pokestop['lat'], pokestop['lon'])) - # permitted interaction distance - 2 (for some jitter leeway) + # permitted interaction distance - 2 (for some jitter/calculation leeway) # estimation of spinning speed limit - if distance > 38 or self.speed > SPINNING_SPEED_LIMIT: + if distance > 38.0 or self.speed > 8.611: self.error_code = '!' return False @@ -858,8 +826,8 @@ async def spin_pokestop(self, pokestop): if result == 1: self.log.info('Spun {}.', name) elif result == 2: - self.log.info('The server said {} was out of spinning range. {:.1f}m {:.1f}{}', - name, distance, self.speed, UNIT_STRING) + self.log.info('The server said {} was out of spinning range. 
{:.1f}m {:.1f}m/s', + name, distance, self.speed) elif result == 3: self.log.warning('{} was in the cooldown period.', name) elif result == 4: diff --git a/scan.py b/scan.py index 433929a3c..9d4435c8e 100755 --- a/scan.py +++ b/scan.py @@ -153,7 +153,8 @@ def cleanup(overseer, manager): FORT_CACHE.pickle() altitudes.pickle() if conf.CACHE_CELLS: - dump_pickle('cells', Worker.cells) + from monocle.worker import CELL_CACHE + dump_pickle('cellcache', CELL_CACHE) spawns.pickle() while not db_proc.queue.empty(): From d2441b738121267ef77ec6215dd1747600915512 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Tue, 2 May 2017 16:26:49 -0600 Subject: [PATCH 04/38] Use pogeo's AltitudeCache --- config.example.py | 48 ++++++------ monocle/altitudes.py | 182 +++++++------------------------------------ monocle/sanitized.py | 4 +- monocle/worker.py | 12 +-- requirements.txt | 1 - scan.py | 3 +- 6 files changed, 63 insertions(+), 187 deletions(-) diff --git a/config.example.py b/config.example.py index 04450720e..5b9c03d8c 100644 --- a/config.example.py +++ b/config.example.py @@ -7,13 +7,13 @@ AREA_NAME = 'SLC' # the city or region you are scanning LANGUAGE = 'EN' # ISO 639-1 codes EN, DE, ES, FR, IT, JA, KO, or ZH for Pokémon/move names MAX_CAPTCHAS = 100 # stop launching new visits if this many CAPTCHAs are pending -SCAN_DELAY = 10 # wait at least this many seconds before scanning with the same account +SCAN_DELAY = 10.0 # wait at least this many seconds before scanning with the same account SPEED_UNIT = 'miles' # valid options are 'miles', 'kilometers', 'meters' SPEED_LIMIT = 19.5 # limit worker speed to this many SPEED_UNITs per hour # The number of simultaneous workers will be these two numbers multiplied. # On the initial run, workers will arrange themselves in a grid across the -# rectangle you defined with MAP_START and MAP_END. +# the boundaries you define below. # The rows/columns will also be used for the dot grid in the console output. 
# Provide more accounts than the product of your grid to allow swapping. GRID = (4, 4) # rows, columns @@ -22,11 +22,24 @@ # any spawn points have been discovered MAP_START = (40.7913, -111.9398) MAP_END = (40.7143, -111.8046) - # do not visit spawn points outside of your MAP_START and MAP_END rectangle -# the boundaries will be the rectangle created by MAP_START and MAP_END, unless STAY_WITHIN_MAP = True +## alternatively define a polygon to use as boundaries +## must be a tuple of tuples (containing coordinates for vertices) +## if BOUNDARIES is set, MAP_START, MAP_END, and STAY_WITHIN_MAP will be ignored +#BOUNDARIES = ((40.799609, -111.948556), (40.792749, -111.887341), (40.779264, -111.838078), (40.761410, -111.817908), (40.728636, -111.805293), (40.688833, -111.785564), (40.689768, -111.919389), (40.750461, -111.949938)) + +## alternatively define multiple polygons to use as boundaries +## must be a tuple of tuples of tuples (containing coordinates for vertices) +#MULTI_BOUNDARIES = (((40.252083, -111.654868), (40.24589, -111.65413), (40.2454018, -111.64340), (40.252509, -111.64268)), ((40.2388, -111.643066), (40.23894, -111.63165), (40.23426, -111.63311), (40.2354, -111.64014))) + +## if using BOUNDARIES or MULTI_BOUNDARIES you may define polygonal holes +## workers will stay out of these holes as if they were out of bounds +## must be a tuple of tuples of tuples +# for only one hole do a tuple of tuples and a trailing comma, like so: +#HOLES = ((40.795, -111.94), (40.79, -111.88), (40.77, -111.83)), + # ensure that you visit within this many meters of every part of your map during bootstrap # lower values are more thorough but will take longer BOOTSTRAP_RADIUS = 120 @@ -63,13 +76,6 @@ # Seconds to sleep after failing to find an eligible worker before trying again. 
SEARCH_SLEEP = 2.5 -## alternatively define a Polygon to use as boundaries (requires shapely) -## if BOUNDARIES is set, STAY_WITHIN_MAP will be ignored -## more information available in the shapely manual: -## http://toblerity.org/shapely/manual.html#polygons -#from shapely.geometry import Polygon -#BOUNDARIES = Polygon(((40.799609, -111.948556), (40.792749, -111.887341), (40.779264, -111.838078), (40.761410, -111.817908), (40.728636, -111.805293), (40.688833, -111.785564), (40.689768, -111.919389), (40.750461, -111.949938))) - # key for Bossland's hashing server, otherwise the old hashing lib will be used #HASH_KEY = '9d87af14461b93cb3605' # this key is fake @@ -129,7 +135,7 @@ # Update the console output every x seconds REFRESH_RATE = 0.75 # 750ms # Update the seen/speed/visit/speed stats every x seconds -STAT_REFRESH = 5 +STAT_REFRESH = 5.0 # sent with GET_PLAYER requests, should match your region PLAYER_LOCALE = {'country': 'US', 'language': 'en', 'timezone': 'America/Denver'} @@ -140,9 +146,6 @@ # number of seconds before timing out on a login request LOGIN_TIMEOUT = 2.5 -# add spawn points reported in cell_ids to the unknown spawns list -#MORE_POINTS = False - # Set to True to kill the scanner when a newer version is forced #FORCED_KILL = False @@ -161,13 +164,14 @@ # used for altitude queries and maps in reports #GOOGLE_MAPS_KEY = 'OYOgW1wryrp2RKJ81u7BLvHfYUA6aArIyuQCXu4' # this key is fake REPORT_MAPS = True # Show maps on reports -#ALT_RANGE = (1250, 1450) # Fall back to altitudes in this range if Google query fails -## Round altitude coordinates to this many decimal places -## More precision will lead to larger caches and more Google API calls -## Maximum distance from coords to rounded coords for precisions (at Lat40): -## 1: 7KM, 2: 700M, 3: 70M, 4: 7M -#ALT_PRECISION = 2 +## S2 cell level to fetch altitudes for +## Higher levels will lead to a larger cache and more Google Elevation API requests +## Average diameter of some levels: +## 9: 17.85km, 10: 
8.93km, 11: 4.46km, 12: 2.23km, 13: 1.12km, 14: 558m, 15: 279m +#ALT_LEVEL = 12 + +#ALT_RANGE = (400.0, 500.0) # Fall back to altitudes in this range if Google query fails ## Automatically resolve captchas using 2Captcha key. #CAPTCHA_KEY = '1abc234de56fab7c89012d34e56fa7b8' @@ -208,7 +212,7 @@ #MANAGER_ADDRESS = ('127.0.0.1', 5002) # could be used for CAPTCHA solving and live worker maps on remote systems # Store the cell IDs so that they don't have to be recalculated every visit. -# Enabling will (potentially drastically) increase memory usage. +# Enabling will increase memory usage. #CACHE_CELLS = False # Only for use with web_sanic (requires PostgreSQL) diff --git a/monocle/altitudes.py b/monocle/altitudes.py index 1fe3d5ff3..364fcad89 100644 --- a/monocle/altitudes.py +++ b/monocle/altitudes.py @@ -1,167 +1,41 @@ -import sys +from os import join -from asyncio import gather, CancelledError -from statistics import mean - -from aiohttp import ClientSession -from polyline import encode as polyencode -from aiopogo import json_loads -from cyrandom import uniform +from pogeo.altitude import AltitudeCache from . 
import bounds, sanitized as conf -from .shared import get_logger, LOOP, run_threaded -from .utils import dump_pickle, float_range, load_pickle, round_coords +from .shared import get_logger -class Altitudes: - """Manage altitudes""" - __slots__ = ('altitudes', 'changed', 'fallback', 'log', 'mean') +log = get_logger('altitudes') - def __init__(self): - self.log = get_logger('altitudes') - self.changed = False - self.load() - if len(self.altitudes) > 5: - self.fallback = self.average - else: - self.fallback = self.random - async def get_all(self): - self.log.info('Fetching all altitudes') +ALTITUDES = AltitudeCache(conf.ALT_LEVEL, conf.ALT_RANGE[0], conf.ALT_RANGE[1]) - coords = self.get_coords() +set_altitude = ALTITUDES.set_alt - async with ClientSession(loop=LOOP) as session: - if len(coords) < 300: - await self.fetch_alts(coords, session) - else: - tasks = [self.fetch_alts(chunk, session) - for chunk in self.chunks(coords)] - await gather(*tasks, loop=LOOP) - self.changed = True - LOOP.create_task(run_threaded(self.pickle)) - async def fetch_alts(self, coords, session, precision=conf.ALT_PRECISION): - try: - async with session.get( - 'https://maps.googleapis.com/maps/api/elevation/json', - params={'locations': 'enc:' + polyencode(coords), - 'key': conf.GOOGLE_MAPS_KEY}, - timeout=10) as resp: - response = await resp.json(loads=json_loads) - for r in response['results']: - coords = round_coords((r['location']['lat'], r['location']['lng']), precision) - self.altitudes[coords] = r['elevation'] - if not self.altitudes: - self.log.error(response['error_message']) - except Exception: - self.log.exception('Error fetching altitudes.') +def load_alts(): + pickle_path = join(conf.DIRECTORY, 'pickles', 'altcache.pickle') + try: + unpickled = ALTITUDES.unpickle(pickle_path, bounds) + except (FileNotFoundError, EOFError): + unpickled = False + except Exception: + unpickled = False + log.exception('Error while trying to unpickle altitudes.') - def get(self, point, 
randomize=uniform): - point = round_coords(point, conf.ALT_PRECISION) - alt = self.altitudes[point] - return randomize(alt - 2.5, alt + 2.5) - - async def fetch(self, point, key=conf.GOOGLE_MAPS_KEY): - if not key: - return self.fallback() + if not unpickled: try: - async with ClientSession(loop=LOOP) as session: - async with session.get( - 'https://maps.googleapis.com/maps/api/elevation/json', - params={'locations': '{0[0]},{0[1]}'.format(point), - 'key': key}, - timeout=10) as resp: - response = await resp.json(loads=json_loads) - altitude = response['results'][0]['elevation'] - self.altitudes[point] = altitude - self.changed = True - return altitude - except CancelledError: - raise - except Exception: + ALTITUDES.fetch_all() + except Exception(): + log.exception('Error while fetching altitudes.') + if ALTITUDES: + log.warning('{} altitudes fetched.', len(ALTITUDES)) try: - self.log.error(response['error_message']) - except (KeyError, NameError): - self.log.error('Error fetching altitude for {}.', point) - return self.fallback() - - def average(self, randomize=uniform): - self.log.info('Fell back to average altitude.') - try: - return randomize(self.mean - 15.0, self.mean + 15.0) - except AttributeError: - self.mean = mean(self.altitudes.values()) - return self.average() - - def random(self, alt_range=conf.ALT_RANGE, randomize=uniform): - self.log.info('Fell back to random altitude.') - return randomize(*conf.ALT_RANGE) - - def load(self): - try: - state = load_pickle('altitudes', raise_exception=True) - except FileNotFoundError: - self.log.info('No altitudes pickle found.') - self.altitudes = {} - LOOP.run_until_complete(self.get_all()) - return - - if state['bounds_hash'] == hash(bounds): - if state['precision'] == conf.ALT_PRECISION and state['altitudes']: - self.altitudes = state['altitudes'] - return - elif state['precision'] < conf.ALT_PRECISION: - self.altitudes = state['altitudes'] - LOOP.run_until_complete(self.get_all()) - return - elif 
state['precision'] <= conf.ALT_PRECISION: - pickled_alts = state['altitudes'] - - to_remove = [] - for coords in pickled_alts.keys(): - if coords not in bounds: - to_remove.append(coords) - for key in to_remove: - del pickled_alts[key] - - self.altitudes = pickled_alts - LOOP.run_until_complete(self.get_all()) - return - self.altitudes = {} - LOOP.run_until_complete(self.get_all()) - - def pickle(self): - if self.changed: - state = { - 'altitudes': self.altitudes, - 'precision': conf.ALT_PRECISION, - 'bounds_hash': hash(bounds) - } - dump_pickle('altitudes', state) - self.changed = False - - def get_coords(self, bounds=bounds, precision=conf.ALT_PRECISION): - coords = [] - if bounds.multi: - for b in bounds.polygons: - coords.extend(self.get_coords(b)) - return coords - step = 1 / (10 ** precision) - west, east = bounds.west, bounds.east - existing = self.altitudes.keys() if self.altitudes else False - for lat in float_range(bounds.south, bounds.north, step): - for lon in float_range(west, east, step): - point = lat, lon - if not existing or point not in existing: - coords.append(round_coords(point, precision)) - return coords - - @staticmethod - def chunks(l, n=300): - """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i:i + n] - - -sys.modules[__name__] = Altitudes() + ALTITUDES.pickle(pickle_path) + except Exception: + log.exception('Error while dumping altitude pickle.') + else: + log.warning('No altitudes fetched, will use random values within ALT_RANGE.') + global set_altitude + set_altitude = ALTITUDES.set_random diff --git a/monocle/sanitized.py b/monocle/sanitized.py index c4603818f..0e1ea86a8 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -20,7 +20,7 @@ _valid_types = { 'ACCOUNTS': set_sequence, 'ACCOUNTS_CSV': path, - 'ALT_PRECISION': int, + 'ALT_LEVEL': int, 'ALT_RANGE': sequence, 'ALWAYS_NOTIFY': int, 'ALWAYS_NOTIFY_IDS': set_sequence_range, @@ -133,7 +133,7 @@ _defaults = { 'ACCOUNTS': None, 
'ACCOUNTS_CSV': None, - 'ALT_PRECISION': 2, + 'ALT_LEVEL': 12, 'ALT_RANGE': (300, 400), 'ALWAYS_NOTIFY': 0, 'ALWAYS_NOTIFY_IDS': set(), diff --git a/monocle/worker.py b/monocle/worker.py index bcbbfc5ec..84fd7b5d5 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -9,10 +9,11 @@ from aiopogo import PGoApi, HashServer, json_loads, exceptions as ex from aiopogo.auth_ptc import AuthPtc +from .altitudes import load_alts, set_altitude from .db import SIGHTING_CACHE, MYSTERY_CACHE from .utils import load_pickle, get_device_info, get_start_coords from .shared import get_logger, LOOP, SessionManager, run_threaded, ACCOUNTS -from . import altitudes, avatar, bounds, db_proc, spawns, sanitized as conf +from . import avatar, bounds, db_proc, spawns, sanitized as conf if conf.NOTIFY: from .notification import Notifier @@ -24,6 +25,8 @@ else: from pogeo import get_cell_ids +load_alts() + class Worker: """Single worker walking on the map""" @@ -95,7 +98,7 @@ def initialize_api(self): self.empty_visits = 0 self.api = PGoApi(device_info=device_info) - self.api.set_position(*self.location, self.altitude) + self.api.location = self.location if self.proxies: self.api.proxy = next(self.proxies) try: @@ -549,10 +552,7 @@ async def visit_point(self, point, spawn_id=None, bootstrap=False): """Wrapper for visit that sets location and logs in if necessary """ try: - try: - point.altitude = altitudes.get(point) - except KeyError: - point.altitude = await altitudes.fetch(point) + set_altitude(point) self.location = point self.api.location = self.location if not self.authenticated: diff --git a/requirements.txt b/requirements.txt index 215a8376b..347480efa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ protobuf>=3.0.0 sqlalchemy>=1.1.0 aiopogo>=1.8.0 -polyline>=1.3.1 aiohttp>=2.0.7,<2.1 pogeo>=0.4.0 cyrandom>=0.3.0 diff --git a/scan.py b/scan.py index 9d4435c8e..109b76730 100755 --- a/scan.py +++ b/scan.py @@ -28,7 +28,7 @@ from monocle.worker import Worker from 
monocle.overseer import Overseer from monocle.db import FORT_CACHE -from monocle import altitudes, db_proc, spawns +from monocle import db_proc, spawns class AccountManager(BaseManager): @@ -151,7 +151,6 @@ def cleanup(overseer, manager): print('Dumping pickles...') dump_pickle('accounts', ACCOUNTS) FORT_CACHE.pickle() - altitudes.pickle() if conf.CACHE_CELLS: from monocle.worker import CELL_CACHE dump_pickle('cellcache', CELL_CACHE) From ec1cc6f6b09943f03f4b6bd995e694d5b773580f Mon Sep 17 00:00:00 2001 From: David Christenson Date: Tue, 2 May 2017 16:27:37 -0600 Subject: [PATCH 05/38] Update solve_captchas --- solve_captchas.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/solve_captchas.py b/solve_captchas.py index 98d7b4f99..2862ddce0 100755 --- a/solve_captchas.py +++ b/solve_captchas.py @@ -11,8 +11,9 @@ from aiopogo import PGoApi, close_sessions, activate_hash_server, exceptions as ex from aiopogo.auth_ptc import AuthPtc -from monocle import altitudes, sanitized as conf -from monocle.utils import get_device_info, get_address, randomize_point +from monocle import sanitized as conf +from monocle.altitudes import load_alts, set_altitude +from monocle.utils import get_device_info, get_address from monocle.bounds import center @@ -51,6 +52,7 @@ async def solve_captcha(url, api, driver, timestamp): async def main(): try: + load_alts() class AccountManager(BaseManager): pass AccountManager.register('captcha_queue') AccountManager.register('extra_queue') @@ -67,22 +69,18 @@ class AccountManager(BaseManager): pass while not captcha_queue.empty(): account = captcha_queue.get() username = account.get('username') - location = account.get('location') - if location and location != (0,0,0): - lat = location[0] - lon = location[1] - else: - lat, lon = randomize_point(center, 0.0001) - try: - alt = altitudes.get((lat, lon)) - except KeyError: - alt = await altitudes.fetch((lat, lon)) + location = account['loc'] + except 
Exception: + location = center + location.jitter(0.0001, 0.0001, 2.0) + + set_altitude(location) try: device_info = get_device_info(account) api = PGoApi(device_info=device_info) - api.set_position(lat, lon, alt) + api.location = location authenticated = False try: @@ -118,7 +116,7 @@ class AccountManager(BaseManager): pass responses = response['responses'] challenge_url = responses['CHECK_CHALLENGE']['challenge_url'] timestamp = responses.get('GET_INVENTORY', {}).get('inventory_delta', {}).get('new_timestamp_ms') - account['location'] = lat, lon, alt + account['location'] = location account['inventory_timestamp'] = timestamp if challenge_url == ' ': account['captcha'] = False From 9861703026ce3e14172da7aa0bfb016bb6e271b9 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Tue, 2 May 2017 17:11:18 -0600 Subject: [PATCH 06/38] DB improvements --- monocle/__init__.py | 9 +++++- monocle/db.py | 77 ++++++++++++++++++++++----------------------- monocle/overseer.py | 8 ++--- monocle/spawns.py | 13 ++++---- 4 files changed, 53 insertions(+), 54 deletions(-) diff --git a/monocle/__init__.py b/monocle/__init__.py index 8e78de905..2b5924538 100644 --- a/monocle/__init__.py +++ b/monocle/__init__.py @@ -1,5 +1,12 @@ __title__ = 'monocle' -__version__ = '0.8b1' +__version__ = '0.8b2' __author__ = 'David Christenson' __license__ = 'MIT License' __copyright__ = 'Copyright (c) 2017 David Christenson ' + +from . import sanitized + +if sanitized.SPAWN_ID_INT: + from pogeo import cellid_to_location as spawnid_to_loc +else: + from pogeo import token_to_location as spawnid_to_loc diff --git a/monocle/db.py b/monocle/db.py index c58ec46ca..6e33df663 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -12,7 +12,7 @@ from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION from sqlalchemy.ext.declarative import declarative_base -from . import bounds, spawns, db_proc, sanitized as conf +from . 
import bounds, db_proc, spawns, spawnid_to_loc, sanitized as conf from .utils import time_until_time, dump_pickle, load_pickle from .shared import call_at, get_logger @@ -55,7 +55,7 @@ class TextInt(TypeDecorator): impl = Text def process_bind_param(self, value, dialect): - return str(value) + return repr(value) def process_result_value(self, value, dialect): return int(value) @@ -67,6 +67,8 @@ def process_result_value(self, value, dialect): ID_TYPE = BigInteger if conf.SPAWN_ID_INT else String(11) +DB_HASH = sha256(conf.DB_ENGINE.encode('utf-8')).digest() + class Team(Enum): none = 0 @@ -202,10 +204,9 @@ def unpickle(self): _engine = create_engine(conf.DB_ENGINE) Session = sessionmaker(bind=_engine) DB_TYPE = _engine.name -DB_HASH = sha256(conf.DB_ENGINE.encode()).digest() if conf.REPORT_SINCE: - SINCE_TIME = mktime(conf.REPORT_SINCE.timetuple()) + SINCE_TIME = int(mktime(conf.REPORT_SINCE.timetuple())) SINCE_QUERY = 'WHERE expire_timestamp > {}'.format(SINCE_TIME) else: SINCE_QUERY = '' @@ -328,31 +329,33 @@ def session_scope(autoflush=False): session.close() -def add_sighting(session, pokemon): +def add_sighting(session, pokemon, _cache=SIGHTING_CACHE, _encounter=conf.ENCOUNTER): # Check if there isn't the same entry already - if pokemon in SIGHTING_CACHE: + if pokemon in _cache: return if session.query(exists().where(and_( Sighting.expire_timestamp == pokemon['expire_timestamp'], Sighting.encounter_id == pokemon['encounter_id'])) ).scalar(): - SIGHTING_CACHE.add(pokemon) + _cache.add(pokemon) return obj = Sighting( pokemon_id=pokemon['pokemon_id'], spawn_id=pokemon['spawn_id'], encounter_id=pokemon['encounter_id'], expire_timestamp=pokemon['expire_timestamp'], - lat=pokemon['lat'], - lon=pokemon['lon'], - atk_iv=pokemon.get('individual_attack'), - def_iv=pokemon.get('individual_defense'), - sta_iv=pokemon.get('individual_stamina'), - move_1=pokemon.get('move_1'), - move_2=pokemon.get('move_2') ) + if _encounter: + try: + obj.atk_iv = 
pokemon['individual_attack'] + obj.def_iv = pokemon['individual_defense'] + obj.sta_iv = pokemon['individual_stamina'] + obj.move_1 = pokemon['move_1'] + obj.move_2 = pokemon['move_2'] + except KeyError: + pass session.add(obj) - SIGHTING_CACHE.add(pokemon) + _cache.add(pokemon) def add_spawnpoint(session, pokemon): @@ -368,7 +371,6 @@ def add_spawnpoint(session, pokemon): .filter(Spawnpoint.spawn_id == spawn_id) \ .first() now = round(time()) - point = pokemon['lat'], pokemon['lon'] spawns.add_known(spawn_id, new_time) if existing: existing.updated = now @@ -391,8 +393,6 @@ def add_spawnpoint(session, pokemon): session.add(Spawnpoint( spawn_id=spawn_id, despawn_time=new_time, - lat=pokemon['lat'], - lon=pokemon['lon'], updated=now, duration=duration, failures=0 @@ -408,20 +408,16 @@ def add_mystery_spawnpoint(session, pokemon): session.add(Spawnpoint( spawn_id=spawn_id, - despawn_time=None, - lat=pokemon['lat'], - lon=pokemon['lon'], updated=0, - duration=None, failures=0 )) - if Location(pokemon['lat'], pokemon['lon']) in bounds: + if spawnid_to_loc(spawn_id) in bounds: spawns.unknowns.add(spawn_id) -def add_mystery(session, pokemon): - if pokemon in MYSTERY_CACHE: +def add_mystery(session, pokemon, _cache=SIGHTING_CACHE, _encounter=conf.ENCOUNTER): + if pokemon in _cache: return add_mystery_spawnpoint(session, pokemon) existing = session.query(Mystery) \ @@ -437,20 +433,22 @@ def add_mystery(session, pokemon): pokemon_id=pokemon['pokemon_id'], spawn_id=pokemon['spawn_id'], encounter_id=pokemon['encounter_id'], - lat=pokemon['lat'], - lon=pokemon['lon'], first_seen=pokemon['seen'], first_seconds=seconds, last_seconds=seconds, - seen_range=0, - atk_iv=pokemon.get('individual_attack'), - def_iv=pokemon.get('individual_defense'), - sta_iv=pokemon.get('individual_stamina'), - move_1=pokemon.get('move_1'), - move_2=pokemon.get('move_2') + seen_range=0 ) + if _encounter: + try: + obj.atk_iv = pokemon['individual_attack'] + obj.def_iv = pokemon['individual_defense'] + 
obj.sta_iv = pokemon['individual_stamina'] + obj.move_1 = pokemon['move_1'] + obj.move_2 = pokemon['move_2'] + except KeyError: + pass session.add(obj) - MYSTERY_CACHE.add(pokemon) + _cache.add(pokemon) def add_fort_sighting(session, raw_fort): @@ -709,7 +707,7 @@ def get_rare_pokemon(session): result = [] for pokemon_id in conf.RARE_IDS: - query = session.query(Sighting) \ + query = session.query(Sighting.id) \ .filter(Sighting.pokemon_id == pokemon_id) if conf.REPORT_SINCE: query = query.filter(Sighting.expire_timestamp > SINCE_TIME) @@ -729,8 +727,7 @@ def get_nonexistent_pokemon(session): def get_all_sightings(session, pokemon_ids): - # TODO: rename this and get_sightings - query = session.query(Sighting) \ + query = session.query(Sighting.pokemon_id, Sighting.spawn_id) \ .filter(Sighting.pokemon_id.in_(pokemon_ids)) if conf.REPORT_SINCE: query = query.filter(Sighting.expire_timestamp > SINCE_TIME) @@ -773,7 +770,7 @@ def get_spawns_per_hour(session, pokemon_id): def get_total_spawns_count(session, pokemon_id): - query = session.query(Sighting) \ + query = session.query(Sighting.id) \ .filter(Sighting.pokemon_id == pokemon_id) if conf.REPORT_SINCE: query = query.filter(Sighting.expire_timestamp > SINCE_TIME) @@ -781,9 +778,9 @@ def get_total_spawns_count(session, pokemon_id): def get_all_spawn_coords(session, pokemon_id=None): - points = session.query(Sighting.lat, Sighting.lon) + points = session.query(Sighting.spawn_id) if pokemon_id: points = points.filter(Sighting.pokemon_id == int(pokemon_id)) if conf.REPORT_SINCE: points = points.filter(Sighting.expire_timestamp > SINCE_TIME) - return points.all() + return [spawnid_to_loc(x[0]).coords for x in points] diff --git a/monocle/overseer.py b/monocle/overseer.py index b84577255..52b7796d5 100755 --- a/monocle/overseer.py +++ b/monocle/overseer.py @@ -8,19 +8,15 @@ from time import time, monotonic from aiopogo import HashServer +from pogeo import diagonal_distance, level_edge from sqlalchemy.exc import 
OperationalError from .db import SIGHTING_CACHE, MYSTERY_CACHE from .utils import get_current_hour, dump_pickle, get_start_coords, best_factors, percentage_split from .shared import ACCOUNTS, get_logger, LOOP, run_threaded -from . import bounds, db_proc, spawns, sanitized as conf +from . import bounds, db_proc, spawnid_to_loc, spawns, sanitized as conf from .worker import Worker -if conf.SPAWN_ID_INT: - from pogeo import diagonal_distance, level_edge, cellid_to_location as spawnid_to_loc -else: - from pogeo import diagonal_distance, level_edge, token_to_location as spawnid_to_loc - ANSI = '\x1b[2J\x1b[H' if platform == 'win32': diff --git a/monocle/spawns.py b/monocle/spawns.py index a8d04efd1..954f0bc0c 100644 --- a/monocle/spawns.py +++ b/monocle/spawns.py @@ -4,7 +4,6 @@ from time import time from . import bounds, db, sanitized as conf -from .db import DB_HASH, session_scope, Spawnpoint from .shared import get_logger from .utils import dump_pickle, load_pickle, get_current_hour, time_until_time @@ -46,8 +45,8 @@ def add_known(self, spawn_id, despawn_time): self.unknown.discard(spawn_id) def update(self, _migration=conf.LAST_MIGRATION, _contains=contains_spawn): - with session_scope() as session: - query = session.query(Spawnpoint.spawn_id, Spawnpoint.despawn_time, Spawnpoint.duration, Spawnpoint.updated) + with db.session_scope() as session: + query = session.query(db.Spawnpoint.spawn_id, db.Spawnpoint.despawn_time, db.Spawnpoint.duration, db.Spawnpoint.updated) known = {} for spawn_id, despawn_time, duration, updated in query: # skip if point is not within boundaries (if applicable) @@ -81,9 +80,9 @@ def get_despawn_time(self, spawn_id, seen): def unpickle(self): try: state = load_pickle('spawns', raise_exception=True) - if (state['class_version'] == 4, - and state['db_hash'] == DB_HASH, - and state['bounds_hash'] == hash(bounds), + if (state['class_version'] == 4 + and state['db_hash'] == db.DB_HASH + and state['bounds_hash'] == hash(bounds) and 
state['last_migration'] == conf.LAST_MIGRATION): self.despawn_times = state['despawn_times'] self.known = state['known'] @@ -101,7 +100,7 @@ def pickle(self): dump_pickle('spawns', { 'bounds_hash': hash(bounds), 'class_version': 4, - 'db_hash': DB_HASH, + 'db_hash': db.DB_HASH, 'despawn_times': self.despawn_times, 'known': self.known, 'last_migration': conf.LAST_MIGRATION, From da68e7029e45277d4d32da7f68184d8fe4d4f318 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Tue, 2 May 2017 18:13:40 -0600 Subject: [PATCH 07/38] Small Travis improvement --- .travis.yml | 8 ++++++-- setup.py | 4 ---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index dbee6390d..731bac317 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,18 +12,22 @@ matrix: dist: trusty python: 3.6 language: python + - os: osx + osx_image: xcode6.4 + language: generic - os: osx osx_image: xcode7.3 language: generic - os: osx - osx_image: xcode8.2 + osx_image: xcode8.3 language: generic before_install: - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update && brew install python3; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update && brew install python3 || brew upgrade python3; fi install: - pip3 install -r requirements.txt + - cp config.example.py monocle/config.py - python3 setup.py install script: diff --git a/setup.py b/setup.py index d5660f9fc..42165f269 100755 --- a/setup.py +++ b/setup.py @@ -1,13 +1,9 @@ #!/usr/bin/env python3 from setuptools import setup -from os.path import exists -from shutil import copyfile from monocle import __version__ as version, __title__ as name -if not exists('monocle/config.py'): - copyfile('config.example.py', 'monocle/config.py') setup( name=name, From 013d745f10eb8222a95d381b7a23b239e1545ee0 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Wed, 10 May 2017 15:57:52 -0600 Subject: [PATCH 08/38] WIP4 --- config.example.py | 30 ++++++++-------- monocle/__init__.py | 2 ++ monocle/altitudes.py | 2 +- 
monocle/db.py | 6 ++-- monocle/names.py | 24 ++++++------- monocle/sanitized.py | 81 +++++++++++++++++++++++++++++--------------- monocle/web_utils.py | 51 ++-------------------------- monocle/worker.py | 10 +++--- web.py | 35 +++++++++++++++---- 9 files changed, 122 insertions(+), 119 deletions(-) diff --git a/config.example.py b/config.example.py index 5b9c03d8c..1c3f2d9c3 100644 --- a/config.example.py +++ b/config.example.py @@ -44,13 +44,13 @@ # lower values are more thorough but will take longer BOOTSTRAP_RADIUS = 120 -GIVE_UP_KNOWN = 75 # try to find a worker for a known spawn for this many seconds before giving up -GIVE_UP_UNKNOWN = 60 # try to find a worker for an unknown point for this many seconds before giving up -SKIP_SPAWN = 90 # don't even try to find a worker for a spawn if the spawn time was more than this many seconds ago +GIVE_UP_KNOWN = 75.0 # try to find a worker for a known spawn for this many seconds before giving up +GIVE_UP_UNKNOWN = 60.0 # try to find a worker for an unknown point for this many seconds before giving up +SKIP_SPAWN = 90.0 # don't even try to find a worker for a spawn if the spawn time was more than this many seconds ago # How often should the mystery queue be reloaded (default 90s) # this will reduce the grouping of workers around the last few mysteries -#RESCAN_UNKNOWN = 90 +#RESCAN_UNKNOWN = 90.0 # filename of accounts CSV ACCOUNTS_CSV = 'accounts.csv' @@ -91,7 +91,7 @@ # Defaults to whatever will allow every worker to be swapped within 6 hours #SWAP_OLDEST = 300 # 5 minutes # Only swap if it's been active for more than x minutes -#MINIMUM_RUNTIME = 10 +#MINIMUM_RUNTIME = 10.0 ### these next 6 options use more requests but look more like the real client APP_SIMULATION = True # mimic the actual app's login requests @@ -105,11 +105,11 @@ # 'notifying' will encounter Pokémon that are eligible for notifications # None will never encounter Pokémon ENCOUNTER = None -#ENCOUNTER_IDS = (3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 
114, 130, 131, 134) +#ENCOUNTER_IDS = {3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 114, 130, 131, 134} # PokéStops SPIN_POKESTOPS = True # spin all PokéStops that are within range -SPIN_COOLDOWN = 300 # spin only one PokéStop every n seconds (default 300) +SPIN_COOLDOWN = 300.0 # spin only one PokéStop every n seconds # minimum number of each item to keep if the bag is cleaned # bag cleaning is disabled if this is not present or is commented out @@ -150,13 +150,13 @@ #FORCED_KILL = False # exclude these Pokémon from the map by default (only visible in trash layer) -TRASH_IDS = ( +TRASH_IDS = { 16, 19, 21, 29, 32, 41, 46, 48, 50, 52, 56, 74, 77, 96, 111, 133, 161, 163, 167, 177, 183, 191, 194 -) +} # include these Pokémon on the "rare" report -RARE_IDS = (3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 114, 130, 131, 134) +RARE_IDS = {3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 114, 130, 131, 134} from datetime import datetime REPORT_SINCE = datetime(2017, 2, 17) # base reports on data from after this date @@ -169,9 +169,9 @@ ## Higher levels will lead to a larger cache and more Google Elevation API requests ## Average diameter of some levels: ## 9: 17.85km, 10: 8.93km, 11: 4.46km, 12: 2.23km, 13: 1.12km, 14: 558m, 15: 279m -#ALT_LEVEL = 12 +#ALT_LEVEL = 13 -#ALT_RANGE = (400.0, 500.0) # Fall back to altitudes in this range if Google query fails +#ALT_RANGE = (390.0, 490.0) # Fall back to altitudes in this range if Google query fails ## Automatically resolve captchas using 2Captcha key. #CAPTCHA_KEY = '1abc234de56fab7c89012d34e56fa7b8' @@ -183,7 +183,7 @@ # allow displaying the live location of workers on the map MAP_WORKERS = True # filter these Pokemon from the map to reduce traffic and browser load -#MAP_FILTER_IDS = [161, 165, 16, 19, 167] +#MAP_FILTER_IDS = (16, 19, 161, 165, 167) # unix timestamp of last spawn point migration, spawn times learned before this will be ignored LAST_MIGRATION = 1481932800 # Dec. 
17th, 2016 @@ -253,7 +253,7 @@ #TZ_OFFSET = 0 # UTC offset in hours (if different from system time) # the required number of seconds remaining to notify about a Pokémon -TIME_REQUIRED = 600 # 10 minutes +TIME_REQUIRED = 600.0 # 10 minutes ### Only set either the NOTIFY_RANKING or NOTIFY_IDS, not both! # The (x) rarest Pokémon will be eligible for notification. Whether a @@ -289,7 +289,7 @@ # Pokémon scores are an average of the Pokémon's rarity score and IV score (from 0 to 1) # If NOTIFY_RANKING is 90, the 90th most common Pokémon will have a rarity of score 0, the rarest will be 1. # IV score is the IV sum divided by 45 (perfect IVs). -FULL_TIME = 1800 # the number of seconds after a notification when only MINIMUM_SCORE will be required +FULL_TIME = 1800.0 # the number of seconds after a notification when only MINIMUM_SCORE will be required INITIAL_SCORE = 0.7 # the required score immediately after a notification MINIMUM_SCORE = 0.4 # the required score after FULL_TIME seconds have passed diff --git a/monocle/__init__.py b/monocle/__init__.py index 2b5924538..71c61ba12 100644 --- a/monocle/__init__.py +++ b/monocle/__init__.py @@ -8,5 +8,7 @@ if sanitized.SPAWN_ID_INT: from pogeo import cellid_to_location as spawnid_to_loc + from pogeo import cellid_to_coords as spawnid_to_coords else: from pogeo import token_to_location as spawnid_to_loc + from pogeo import token_to_coords as spawnid_to_coords diff --git a/monocle/altitudes.py b/monocle/altitudes.py index 364fcad89..6972e4e4a 100644 --- a/monocle/altitudes.py +++ b/monocle/altitudes.py @@ -9,7 +9,7 @@ log = get_logger('altitudes') -ALTITUDES = AltitudeCache(conf.ALT_LEVEL, conf.ALT_RANGE[0], conf.ALT_RANGE[1]) +ALTITUDES = AltitudeCache(conf.ALT_LEVEL, conf.GOOGLE_MAPS_KEY, conf.ALT_RANGE[0], conf.ALT_RANGE[1]) set_altitude = ALTITUDES.set_alt diff --git a/monocle/db.py b/monocle/db.py index 6e33df663..2675d14ef 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -12,7 +12,7 @@ from sqlalchemy.dialects.postgresql 
import DOUBLE_PRECISION from sqlalchemy.ext.declarative import declarative_base -from . import bounds, db_proc, spawns, spawnid_to_loc, sanitized as conf +from . import bounds, db_proc, spawns, spawnid_to_coords, spawnid_to_loc, sanitized as conf from .utils import time_until_time, dump_pickle, load_pickle from .shared import call_at, get_logger @@ -322,7 +322,7 @@ def session_scope(autoflush=False): try: yield session session.commit() - except: + except Exception: session.rollback() raise finally: @@ -783,4 +783,4 @@ def get_all_spawn_coords(session, pokemon_id=None): points = points.filter(Sighting.pokemon_id == int(pokemon_id)) if conf.REPORT_SINCE: points = points.filter(Sighting.expire_timestamp > SINCE_TIME) - return [spawnid_to_loc(x[0]).coords for x in points] + return [spawnid_to_coords(x[0]) for x in points] diff --git a/monocle/names.py b/monocle/names.py index 3c1d095ea..b1d93912a 100644 --- a/monocle/names.py +++ b/monocle/names.py @@ -4,7 +4,7 @@ language = conf.LANGUAGE.upper()[:2] -POKEMON = defaultdict(lambda: '?', { +POKEMON = { 1: 'Bulbasaur', 2: 'Ivysaur', 3: 'Venusaur', @@ -256,7 +256,7 @@ 249: 'Lugia', 250: 'Ho-Oh', 251: 'Celebi' -}) +} MOVES = defaultdict(lambda: '?', { 1: 'Thunder Shock', @@ -483,7 +483,7 @@ if language == 'EN': pass elif language == 'DE': - POKEMON = defaultdict(lambda: '?', { + POKEMON = { 1: 'Bisasam', 2: 'Bisaknosp', 3: 'Bisaflor', @@ -735,7 +735,7 @@ 249: 'Lugia', 250: 'Ho-Oh', 251: 'Celebi' - }) + } MOVES = defaultdict(lambda: '?', { 1: 'Donnerschock', @@ -959,7 +959,7 @@ 281: 'Kraftreserve' }) elif language == 'FR': - POKEMON = defaultdict(lambda: '?', { + POKEMON = { 1: 'Bulbizarre', 2: 'Herbizarre', 3: 'Florizarre', @@ -1211,7 +1211,7 @@ 249: 'Lugia', 250: 'Ho-Oh', 251: 'Celebi' - }) + } MOVES = defaultdict(lambda: '?', { 1: 'Éclair', @@ -1435,7 +1435,7 @@ 281: 'Puissance Cachée' }) elif language == 'ZH': - POKEMON = defaultdict(lambda: '?', { + POKEMON = { 1: '妙蛙种子', 2: '妙蛙草', 3: '妙蛙花', @@ -1687,10 +1687,10 @@ 249: 
'洛奇亚', 250: '凤王', 251: '雪拉比' - }) + } # TODO: Chinese move names elif language == 'JA': - POKEMON = defaultdict(lambda: '?', { + POKEMON = { 1: 'フシギダネ', 2: 'フシギソウ', 3: 'フシギバナ', @@ -1942,7 +1942,7 @@ 249: 'ルギア', 250: 'ホウオウ', 251: 'セレビィ' - }) + } MOVES = defaultdict(lambda: '?', { 1: 'でんきショック', @@ -2613,7 +2613,7 @@ 281: 'Introforza' }) elif language == 'KO': - POKEMON = defaultdict(lambda: '?', { + POKEMON = { 1: '이상해씨', 2: '이상해풀', 3: '이상해꽃', @@ -2865,7 +2865,7 @@ 249: '루기아', 250: '칠색조', 251: '세레비' - }) + } MOVES = defaultdict(lambda: '?', { 1: '전기쇼크', diff --git a/monocle/sanitized.py b/monocle/sanitized.py index 0e1ea86a8..769d865c9 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -27,7 +27,7 @@ 'APP_SIMULATION': bool, 'AREA_NAME': str, 'AUTHKEY': bytes, - 'BOOTSTRAP_RADIUS': Number, + 'BOOTSTRAP_LEVEL': int, 'BOUNDARIES': tuple, 'CACHE_CELLS': bool, 'CAPTCHAS_ALLOWED': int, @@ -85,7 +85,7 @@ 'NAME_FONT': str, 'NEVER_NOTIFY_IDS': set_sequence_range, 'NOTIFY': bool, - 'NOTIFY_IDS': set_sequence_range, + 'NOTIFY_IDS': sequence, 'NOTIFY_RANKING': int, 'PASS': str, 'PB_API_KEY': str, @@ -133,14 +133,14 @@ _defaults = { 'ACCOUNTS': None, 'ACCOUNTS_CSV': None, - 'ALT_LEVEL': 12, - 'ALT_RANGE': (300, 400), + 'ALT_LEVEL': 13, + 'ALT_RANGE': (390.0, 490.0), 'ALWAYS_NOTIFY': 0, - 'ALWAYS_NOTIFY_IDS': set(), + 'ALWAYS_NOTIFY_IDS': frozenset(), 'APP_SIMULATION': True, 'AREA_NAME': 'Area', 'AUTHKEY': b'm3wtw0', - 'BOOTSTRAP_RADIUS': 120, + 'BOOTSTRAP_LEVEL': 16, 'BOUNDARIES': None, 'CACHE_CELLS': False, 'CAPTCHAS_ALLOWED': 3, @@ -157,9 +157,9 @@ 'FB_PAGE_ID': None, 'FIXED_OPACITY': False, 'FORCED_KILL': None, - 'FULL_TIME': 1800, - 'GIVE_UP_KNOWN': 75, - 'GIVE_UP_UNKNOWN': 60, + 'FULL_TIME': 1800.0, + 'GIVE_UP_KNOWN': 75.0, + 'GIVE_UP_UNKNOWN': 60.0, 'GOOD_ENOUGH': None, 'GOOGLE_MAPS_KEY': '', 'HASHTAGS': None, @@ -185,11 +185,11 @@ 'MAP_WORKERS': True, 'MAX_CAPTCHAS': 0, 'MAX_RETRIES': 3, - 'MINIMUM_RUNTIME': 10, + 'MINIMUM_RUNTIME': 10.0, 'MOVE_FONT': 
'sans-serif', 'MULTI_BOUNDARIES': None, 'NAME_FONT': 'sans-serif', - 'NEVER_NOTIFY_IDS': (), + 'NEVER_NOTIFY_IDS': frozenset(), 'NOTIFY': False, 'NOTIFY_IDS': None, 'NOTIFY_RANKING': None, @@ -199,32 +199,32 @@ 'PLAYER_LOCALE': {'country': 'US', 'language': 'en', 'timezone': 'America/Denver'}, 'PROVIDER': None, 'PROXIES': None, - 'RARE_IDS': (), + 'RARE_IDS': frozenset(), 'RARITY_OVERRIDE': {}, 'REFRESH_RATE': 0.6, 'REPORT_MAPS': True, 'REPORT_SINCE': None, - 'RESCAN_UNKNOWN': 90, + 'RESCAN_UNKNOWN': 90.0, 'SCAN_DELAY': 10, 'SEARCH_SLEEP': 2.5, 'SHOW_TIMER': False, 'SIMULTANEOUS_LOGINS': 2, 'SIMULTANEOUS_SIMULATION': 4, - 'SKIP_SPAWN': 90, + 'SKIP_SPAWN': 90.0, 'SMART_THROTTLE': False, 'SPAWN_ID_INT': True, 'SPEED_LIMIT': None, 'SPEED_UNIT': 'miles', - 'SPIN_COOLDOWN': 300, + 'SPIN_COOLDOWN': 300.0, 'SPIN_POKESTOPS': True, - 'STAT_REFRESH': 5, + 'STAT_REFRESH': 5.0, 'STAY_WITHIN_MAP': True, 'SWAP_OLDEST': 21600 / worker_count, 'TELEGRAM_BOT_TOKEN': None, 'TELEGRAM_CHAT_ID': None, 'TELEGRAM_USERNAME': None, - 'TIME_REQUIRED': 300, - 'TRASH_IDS': (), + 'TIME_REQUIRED': 600.0, + 'TRASH_IDS': frozenset(), 'TWEET_IMAGES': False, 'TWITTER_ACCESS_KEY': None, 'TWITTER_ACCESS_SECRET': None, @@ -236,23 +236,50 @@ 'WEBHOOKS': None } +_cast = { + 'ALWAYS_NOTIFY_IDS': set, + 'ENCOUNTER_IDS': set, + 'FULL_TIME': float, + 'GIVE_UP_KNOWN': float, + 'GIVE_UP_UNKNOWN': float, + 'GOOD_ENOUGH': float, + 'INITIAL_SCORE': float, + 'LOGIN_TIMEOUT': float, + 'MAP_FILTER_IDS': tuple, + 'MINIMUM_RUNTIME': float, + 'MINIMUM_SCORE': float, + 'NEVER_NOTIFY_IDS': set, + 'RARE_IDS': set, + 'REFRESH_RATE': float, + 'SCAN_DELAY': float, + 'SEARCH_SLEEP': float, + 'SKIP_SPAWN': float, + 'SMART_THROTTLE': float, + 'SPEED_LIMIT': float, + 'SPIN_COOLDOWN': float, + 'STAT_REFRESH': float, + 'SWAP_OLDEST': float, + 'TIME_REQUIRED': float, + 'TRASH_IDS': set +} + class Config: __spec__ = __spec__ __slots__ = tuple(_valid_types.keys()) + ('log',) - def __init__(self): + def __init__(self, 
valid_types=_valid_types, defaults=_defaults, cast=_cast): self.log = getLogger('sanitizer') for key, value in (x for x in vars(config).items() if x[0].isupper()): try: - if isinstance(value, _valid_types[key]): - setattr(self, key, value) - if key in _defaults: - del _defaults[key] - elif key in _defaults and value is _defaults[key]: - setattr(self, key, _defaults.pop(key)) + if isinstance(value, valid_types[key]): + setattr(self, key, value if key not in cast else cast[key](value)) + if key in defaults: + del defaults[key] + elif key in defaults and value is defaults[key]: + setattr(self, key, defaults.pop(key)) else: - valid = _valid_types[key] + valid = valid_types[key] actual = type(value).__name__ if isinstance(valid, type): err = '{} must be {}. Yours is: {}.'.format( @@ -279,4 +306,4 @@ def __getattr__(self, name): sys.modules[__name__] = Config() -del _valid_types, config +del _cast, _valid_types, config diff --git a/monocle/web_utils.py b/monocle/web_utils.py index e38f1a179..b1099d42c 100644 --- a/monocle/web_utils.py +++ b/monocle/web_utils.py @@ -3,10 +3,10 @@ from multiprocessing.managers import BaseManager, RemoteError from time import time -from monocle import sanitized as conf +from monocle import spawnid_to_coords, sanitized as conf from monocle.db import get_forts, Pokestop, session_scope, Sighting, Spawnpoint from monocle.utils import get_address -from monocle.names import DAMAGE, MOVES, POKEMON +from monocle.names import POKEMON def get_args(): @@ -73,45 +73,11 @@ def get_worker_markers(workers): } for worker_no, (location, timestamp, speed, total_seen, visits, seen_here) in workers.data] -def sighting_to_marker(pokemon, names=POKEMON, moves=MOVES, damage=DAMAGE): - pokemon_id = pokemon.pokemon_id - marker = { - 'id': 'pokemon-' + str(pokemon.id), - 'trash': pokemon_id in conf.TRASH_IDS, - 'name': names[pokemon_id], - 'pokemon_id': pokemon_id, - 'lat': pokemon.lat, - 'lon': pokemon.lon, - 'expires_at': pokemon.expire_timestamp, - } - move1 = 
pokemon.move_1 - if pokemon.move_1: - move2 = pokemon.move_2 - marker['atk'] = pokemon.atk_iv - marker['def'] = pokemon.def_iv - marker['sta'] = pokemon.sta_iv - marker['move1'] = moves[move1] - marker['move2'] = moves[move2] - marker['damage1'] = damage[move1] - marker['damage2'] = damage[move2] - return marker - - -def get_pokemarkers(after_id=0): - with session_scope() as session: - pokemons = session.query(Sighting) \ - .filter(Sighting.expire_timestamp > time(), - Sighting.id > after_id) - if conf.MAP_FILTER_IDS: - pokemons = pokemons.filter(~Sighting.pokemon_id.in_(conf.MAP_FILTER_IDS)) - return tuple(map(sighting_to_marker, pokemons)) - - def get_gym_markers(names=POKEMON): with session_scope() as session: forts = get_forts(session) return [{ - 'id': 'fort-' + str(fort['fort_id']), + 'id': 'fort-' + repr(fort['fort_id']), 'sighting_id': fort['id'], 'prestige': fort['prestige'], 'pokemon_id': fort['guard_pokemon_id'], @@ -122,17 +88,6 @@ def get_gym_markers(names=POKEMON): } for fort in forts] -def get_spawnpoint_markers(): - with session_scope() as session: - spawns = session.query(Spawnpoint) - return [{ - 'spawn_id': spawn.spawn_id, - 'despawn_time': spawn.despawn_time, - 'lat': spawn.lat, - 'lon': spawn.lon, - 'duration': spawn.duration - } for spawn in spawns] - if conf.BOUNDARIES: from shapely.geometry import mapping diff --git a/monocle/worker.py b/monocle/worker.py index 84fd7b5d5..66e3dc99a 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -8,6 +8,7 @@ from aiopogo import PGoApi, HashServer, json_loads, exceptions as ex from aiopogo.auth_ptc import AuthPtc +from pogeo.utils import location_to_cellid, location_to_token from .altitudes import load_alts, set_altitude from .db import SIGHTING_CACHE, MYSTERY_CACHE @@ -1104,8 +1105,6 @@ def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): 'type': 'pokemon', 'encounter_id': raw['encounter_id'], 'pokemon_id': raw['pokemon_data']['pokemon_id'], - 'lat': raw['latitude'], - 'lon': raw['longitude'], 
'spawn_id': int(raw['spawn_point_id'], 16) if spawn_int else raw['spawn_point_id'] 'seen': tss } @@ -1124,15 +1123,14 @@ def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): return norm @staticmethod - def normalize_lured(raw, now): + def normalize_lured(raw, now, spawn_int=conf.SPAWN_ID_INT): + loc = Location(raw['latitude'], raw['longitude']) return { 'type': 'pokemon', 'encounter_id': raw['lure_info']['encounter_id'], 'pokemon_id': raw['lure_info']['active_pokemon_id'], 'expire_timestamp': raw['lure_info']['lure_expires_timestamp_ms'] // 1000, - 'lat': raw['latitude'], - 'lon': raw['longitude'], - 'spawn_id': -1 if conf.SPAWN_ID_INT else 'LURED', + 'spawn_id': location_to_cellid(loc, 30) if spawn_int else location_to_token(loc, 30), 'time_till_hidden': (raw['lure_info']['lure_expires_timestamp_ms'] - now) // 1000, 'inferred': 'pokestop' } diff --git a/web.py b/web.py index 478f3dfce..23af38655 100755 --- a/web.py +++ b/web.py @@ -10,10 +10,11 @@ except ImportError: from json import dumps -from flask import Flask, jsonify, Markup, render_template, request +from pogeo.webcache import SightingCache, SpawnCache +from flask import Flask, jsonify, make_response, Markup, render_template, request from monocle import db, sanitized as conf -from monocle.names import POKEMON +from monocle.names import DAMAGE, MOVES, POKEMON from monocle.web_utils import * from monocle.bounds import area, center @@ -78,9 +79,22 @@ def fullmap(map_html=render_map()): @app.route('/data') -def pokemon_data(): - last_id = request.args.get('last_id', 0) - return jsonify(get_pokemarkers(last_id)) +def pokemon_data( + cache=SightingCache( + conf.TRASH_IDS, POKEMON, MOVES, DAMAGE, conf.MAP_FILTER_IDS, + db.Sighting, db.Session, conf.SPAWN_ID_INT + ), + _resp=make_response): + compress = 'gzip' in request.headers.get('Accept-Encoding', '').lower() + try: + last_id = int(request.args['last_id']) + except KeyError: + last_id = 0 + response = _resp(cache.get_json(last_id, compress)) + 
response.mimetype = 'application/json' + if compress: + response.headers['Content-Encoding'] = 'gzip' + return response @app.route('/gym_data') @@ -89,8 +103,15 @@ def gym_data(): @app.route('/spawnpoints') -def spawn_points(): - return jsonify(get_spawnpoint_markers()) +def spawn_points( + cache=SpawnCache(conf.SPAWN_ID_INT, db.Spawnpoint, db.Session), + _resp=make_response): + compress = 'gzip' in request.headers.get('Accept-Encoding', '').lower() + response = _resp(cache.get_json(compress)) + response.mimetype = 'application/json' + if compress: + response.headers['Content-Encoding'] = 'gzip' + return response @app.route('/pokestops') From b0cd60f79850c5ef34cd1ad8ca3b6c5df7214fe2 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Wed, 10 May 2017 17:55:44 -0600 Subject: [PATCH 09/38] Update JS --- monocle/static/js/main.js | 51 +++++++++++++++++++-------------------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/monocle/static/js/main.js b/monocle/static/js/main.js index b83c147cd..62b0b257e 100644 --- a/monocle/static/js/main.js +++ b/monocle/static/js/main.js @@ -14,7 +14,7 @@ var PokemonIcon = L.Icon.extend({ '
' + '' + '
' + - '
' + calculateRemainingTime(this.options.expires_at) + '
' + + '
' + calculateRemainingTime(this.options.expire) + '
' + ''; return div; } @@ -73,29 +73,29 @@ monitor(overlays.Gyms, true) monitor(overlays.Workers, false) function getPopupContent (item) { - var diff = (item.expires_at - new Date().getTime() / 1000); - var minutes = parseInt(diff / 60); - var seconds = parseInt(diff - (minutes * 60)); - var expires_at = minutes + 'm ' + seconds + 's'; + var diff = (item.expire - new Date().getTime() / 1000); + var minutes = Math.floor(diff / 60); + var seconds = Math.floor(diff % 60); + var expire = minutes + 'm ' + seconds + 's'; var content = '' + item.name + ' - #' + item.pokemon_id + ''; - if(item.atk != undefined){ + if (item.atk != undefined) { var totaliv = 100 * (item.atk + item.def + item.sta) / 45; content += ' - ' + totaliv.toFixed(2) + '%
'; - content += 'Disappears in: ' + expires_at + '
'; + content += 'Disappears in: ' + expire + '
'; content += 'Move 1: ' + item.move1 + ' ( ' + item.damage1 + ' dps )
'; content += 'Move 2: ' + item.move2 + ' ( ' + item.damage2 + ' dps )
'; content += 'IV: ' + item.atk + ' atk, ' + item.def + ' def, ' + item.sta + ' sta
' } else { - content += '
Disappears in: ' + expires_at + '
'; + content += '
Disappears in: ' + expire + '
'; } - content += 'Hide'; + content += 'Hide'; content += '  |  '; - var userPref = getPreference('filter-'+item.pokemon_id); + var userPref = getPreference('filter-' + item.pid); if (userPref == 'trash'){ - content += 'Move to Pokemon'; + content += 'Move to Pokemon'; }else{ - content += 'Move to Trash'; + content += 'Move to Trash'; } content += '
=> Get directions'; return content; @@ -109,12 +109,11 @@ function getOpacity (diff) { } function PokemonMarker (raw) { - var icon = new PokemonIcon({iconUrl: '/static/monocle-icons/icons/' + raw.pokemon_id + '.png', expires_at: raw.expires_at}); + var icon = new PokemonIcon({iconUrl: '/static/monocle-icons/icons/' + raw.pid + '.png', expire: raw.expire}); var marker = L.marker([raw.lat, raw.lon], {icon: icon, opacity: 1}); - var intId = parseInt(raw.id.split('-')[1]); - if (_last_pokemon_id < intId){ - _last_pokemon_id = intId; + if (_last_pokemon_id < raw.id) { + _last_pokemon_id = raw.id; } if (raw.trash) { @@ -122,12 +121,12 @@ function PokemonMarker (raw) { } else { marker.overlay = 'Pokemon'; } - var userPreference = getPreference('filter-'+raw.pokemon_id); - if (userPreference === 'pokemon'){ + var userPreference = getPreference('filter-' + raw.pid); + if (userPreference === 'pokemon') { marker.overlay = 'Pokemon'; - }else if (userPreference === 'trash'){ + } else if (userPreference === 'trash') { marker.overlay = 'Trash'; - }else if (userPreference === 'hidden'){ + } else if (userPreference === 'hidden') { marker.overlay = 'Hidden'; } marker.raw = raw; @@ -148,7 +147,7 @@ function PokemonMarker (raw) { if (marker.overlay === "Hidden" || overlays[marker.overlay].hidden) { return; } - var diff = marker.raw.expires_at - new Date().getTime() / 1000; + var diff = marker.raw.expire - new Date().getTime() / 1000; if (diff > 0) { marker.setOpacity(getOpacity(diff)); } else { @@ -182,7 +181,7 @@ function FortMarker (raw) { content = 'Team Instinct' } content += '
Prestige: ' + raw.prestige + - '
Guarding Pokemon: ' + raw.pokemon_name + ' (#' + raw.pokemon_id + ')'; + '
Guarding Pokemon: ' + raw.pokemon_name + ' (#' + raw.pid + ')'; } content += '
=> Get directions'; event.popup.setContent(content); @@ -447,12 +446,12 @@ function moveToLayer(id, layer){ layer = layer.toLowerCase(); for(var k in markers) { var m = markers[k]; - if ((k.indexOf("pokemon-") > -1) && (m !== undefined) && (m.raw.pokemon_id === id)){ + if ((m !== undefined) && (m.raw.pid === id)) { m.removeFrom(overlays[m.overlay]); if (layer === 'pokemon'){ m.overlay = "Pokemon"; m.addTo(overlays.Pokemon); - }else if (layer === 'trash') { + } else if (layer === 'trash') { m.overlay = "Trash"; m.addTo(overlays.Trash); } @@ -532,8 +531,8 @@ $('.scroll-up').click(function () { function calculateRemainingTime(expire_at_timestamp) { var diff = (expire_at_timestamp - new Date().getTime() / 1000); - var minutes = parseInt(diff / 60); - var seconds = parseInt(diff - (minutes * 60)); + var minutes = Math.floor(diff / 60); + var seconds = Math.floor(diff % 60); return minutes + ':' + (seconds > 9 ? "" + seconds: "0" + seconds); } From b06b4fc8d9f861df60c66fbe711e85eba82a8723 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Wed, 10 May 2017 18:01:19 -0600 Subject: [PATCH 10/38] Update jquery to 3.2.1 --- .gitattributes | 5 +++++ .../js/{jquery-3.2.0.min.js => jquery-3.2.1.min.js} | 8 ++++---- monocle/templates/newmap.html | 2 +- monocle/templates/report.html | 2 +- monocle/templates/report_single.html | 2 +- monocle/templates/workersmap.html | 2 +- 6 files changed, 13 insertions(+), 8 deletions(-) create mode 100644 .gitattributes rename monocle/static/js/{jquery-3.2.0.min.js => jquery-3.2.1.min.js} (50%) diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..ec1b886d5 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +monocle/static/js/bootstrap.min.js linguist-vendored +monocle/static/js/jquery-3.2.1.min.js linguist-vendored +monocle/static/js/leaflet.js linguist-vendored +monocle/static/css/bootstrap.min.css linguist-vendored +monocle/static/css/leaflet.css linguist-vendored diff --git 
a/monocle/static/js/jquery-3.2.0.min.js b/monocle/static/js/jquery-3.2.1.min.js similarity index 50% rename from monocle/static/js/jquery-3.2.0.min.js rename to monocle/static/js/jquery-3.2.1.min.js index 2ec0d1da0..644d35e27 100644 --- a/monocle/static/js/jquery-3.2.0.min.js +++ b/monocle/static/js/jquery-3.2.1.min.js @@ -1,4 +1,4 @@ -/*! jQuery v3.2.0 | (c) JS Foundation and other contributors | jquery.org/license */ -!function(a,b){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){"use strict";var c=[],d=a.document,e=Object.getPrototypeOf,f=c.slice,g=c.concat,h=c.push,i=c.indexOf,j={},k=j.toString,l=j.hasOwnProperty,m=l.toString,n=m.call(Object),o={};function p(a,b){b=b||d;var c=b.createElement("script");c.text=a,b.head.appendChild(c).parentNode.removeChild(c)}var q="3.2.0",r=function(a,b){return new r.fn.init(a,b)},s=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,t=/^-ms-/,u=/-([a-z])/g,v=function(a,b){return b.toUpperCase()};r.fn=r.prototype={jquery:q,constructor:r,length:0,toArray:function(){return f.call(this)},get:function(a){return null==a?f.call(this):a<0?this[a+this.length]:this[a]},pushStack:function(a){var b=r.merge(this.constructor(),a);return b.prevObject=this,b},each:function(a){return r.each(this,a)},map:function(a){return this.pushStack(r.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(f.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(a<0?b:0);return this.pushStack(c>=0&&c0&&b-1 in a)}var x=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ha(),z=ha(),A=ha(),B=function(a,b){return 
a===b&&(l=!0),0},C={}.hasOwnProperty,D=[],E=D.pop,F=D.push,G=D.push,H=D.slice,I=function(a,b){for(var c=0,d=a.length;c+~]|"+K+")"+K+"*"),S=new RegExp("="+K+"*([^\\]'\"]*?)"+K+"*\\]","g"),T=new RegExp(N),U=new RegExp("^"+L+"$"),V={ID:new RegExp("^#("+L+")"),CLASS:new RegExp("^\\.("+L+")"),TAG:new RegExp("^("+L+"|[*])"),ATTR:new RegExp("^"+M),PSEUDO:new RegExp("^"+N),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+K+"*(even|odd|(([+-]|)(\\d*)n|)"+K+"*(?:([+-]|)"+K+"*(\\d+)|))"+K+"*\\)|)","i"),bool:new RegExp("^(?:"+J+")$","i"),needsContext:new RegExp("^"+K+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+K+"*((?:-\\d)?\\d*)"+K+"*\\)|)(?=[^-]|$)","i")},W=/^(?:input|select|textarea|button)$/i,X=/^h\d$/i,Y=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,$=/[+~]/,_=new RegExp("\\\\([\\da-f]{1,6}"+K+"?|("+K+")|.)","ig"),aa=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:d<0?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},ba=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ca=function(a,b){return b?"\0"===a?"\ufffd":a.slice(0,-1)+"\\"+a.charCodeAt(a.length-1).toString(16)+" ":"\\"+a},da=function(){m()},ea=ta(function(a){return a.disabled===!0&&("form"in a||"label"in a)},{dir:"parentNode",next:"legend"});try{G.apply(D=H.call(v.childNodes),v.childNodes),D[v.childNodes.length].nodeType}catch(fa){G={apply:D.length?function(a,b){F.apply(a,H.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function ga(a,b,d,e){var f,h,j,k,l,o,r,s=b&&b.ownerDocument,w=b?b.nodeType:9;if(d=d||[],"string"!=typeof a||!a||1!==w&&9!==w&&11!==w)return d;if(!e&&((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,p)){if(11!==w&&(l=Z.exec(a)))if(f=l[1]){if(9===w){if(!(j=b.getElementById(f)))return d;if(j.id===f)return d.push(j),d}else if(s&&(j=s.getElementById(f))&&t(b,j)&&j.id===f)return d.push(j),d}else{if(l[2])return 
G.apply(d,b.getElementsByTagName(a)),d;if((f=l[3])&&c.getElementsByClassName&&b.getElementsByClassName)return G.apply(d,b.getElementsByClassName(f)),d}if(c.qsa&&!A[a+" "]&&(!q||!q.test(a))){if(1!==w)s=b,r=a;else if("object"!==b.nodeName.toLowerCase()){(k=b.getAttribute("id"))?k=k.replace(ba,ca):b.setAttribute("id",k=u),o=g(a),h=o.length;while(h--)o[h]="#"+k+" "+sa(o[h]);r=o.join(","),s=$.test(a)&&qa(b.parentNode)||b}if(r)try{return G.apply(d,s.querySelectorAll(r)),d}catch(x){}finally{k===u&&b.removeAttribute("id")}}}return i(a.replace(P,"$1"),b,d,e)}function ha(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ia(a){return a[u]=!0,a}function ja(a){var b=n.createElement("fieldset");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ka(a,b){var c=a.split("|"),e=c.length;while(e--)d.attrHandle[c[e]]=b}function la(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&a.sourceIndex-b.sourceIndex;if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function na(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function oa(a){return function(b){return"form"in b?b.parentNode&&b.disabled===!1?"label"in b?"label"in b.parentNode?b.parentNode.disabled===a:b.disabled===a:b.isDisabled===a||b.isDisabled!==!a&&ea(b)===a:b.disabled===a:"label"in b&&b.disabled===a}}function pa(a){return ia(function(b){return b=+b,ia(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function qa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=ga.support={},f=ga.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return!!b&&"HTML"!==b.nodeName},m=ga.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return 
g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=n.documentElement,p=!f(n),v!==n&&(e=n.defaultView)&&e.top!==e&&(e.addEventListener?e.addEventListener("unload",da,!1):e.attachEvent&&e.attachEvent("onunload",da)),c.attributes=ja(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ja(function(a){return a.appendChild(n.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=Y.test(n.getElementsByClassName),c.getById=ja(function(a){return o.appendChild(a).id=u,!n.getElementsByName||!n.getElementsByName(u).length}),c.getById?(d.filter.ID=function(a){var b=a.replace(_,aa);return function(a){return a.getAttribute("id")===b}},d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c?[c]:[]}}):(d.filter.ID=function(a){var b=a.replace(_,aa);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}},d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c,d,e,f=b.getElementById(a);if(f){if(c=f.getAttributeNode("id"),c&&c.value===a)return[f];e=b.getElementsByName(a),d=0;while(f=e[d++])if(c=f.getAttributeNode("id"),c&&c.value===a)return[f]}return[]}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){if("undefined"!=typeof b.getElementsByClassName&&p)return 
b.getElementsByClassName(a)},r=[],q=[],(c.qsa=Y.test(n.querySelectorAll))&&(ja(function(a){o.appendChild(a).innerHTML="",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+K+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+K+"*(?:value|"+J+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ja(function(a){a.innerHTML="";var b=n.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+K+"*[*^$|!~]?="),2!==a.querySelectorAll(":enabled").length&&q.push(":enabled",":disabled"),o.appendChild(a).disabled=!0,2!==a.querySelectorAll(":disabled").length&&q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=Y.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ja(function(a){c.disconnectedMatch=s.call(a,"*"),s.call(a,"[s!='']:x"),r.push("!=",N)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=Y.test(o.compareDocumentPosition),t=b||Y.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===n||a.ownerDocument===v&&t(v,a)?-1:b===n||b.ownerDocument===v&&t(v,b)?1:k?I(k,a)-I(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,g=[a],h=[b];if(!e||!f)return 
a===n?-1:b===n?1:e?-1:f?1:k?I(k,a)-I(k,b):0;if(e===f)return la(a,b);c=a;while(c=c.parentNode)g.unshift(c);c=b;while(c=c.parentNode)h.unshift(c);while(g[d]===h[d])d++;return d?la(g[d],h[d]):g[d]===v?-1:h[d]===v?1:0},n):n},ga.matches=function(a,b){return ga(a,null,null,b)},ga.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(S,"='$1']"),c.matchesSelector&&p&&!A[b+" "]&&(!r||!r.test(b))&&(!q||!q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return ga(b,n,null,[a]).length>0},ga.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},ga.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&C.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},ga.escape=function(a){return(a+"").replace(ba,ca)},ga.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},ga.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=ga.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=ga.selectors={cacheLength:50,createPseudo:ia,match:V,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(_,aa),a[3]=(a[3]||a[4]||a[5]||"").replace(_,aa),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return 
a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||ga.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&ga.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return V.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&T.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(_,aa).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+K+")"+a+"("+K+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=ga.attr(d,a);return null==e?"!="===b:!b||(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(O," ")+" ").indexOf(c)>-1:"|="===b&&(e===c||e.slice(0,c.length+1)===c+"-"))}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h,t=!1;if(q){if(f){while(p){m=b;while(m=m[p])if(h?m.nodeName.toLowerCase()===r:1===m.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){m=q,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n&&j[2],m=n&&q.childNodes[n];while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if(1===m.nodeType&&++t&&m===b){k[a]=[w,n,t];break}}else 
if(s&&(m=b,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n),t===!1)while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if((h?m.nodeName.toLowerCase()===r:1===m.nodeType)&&++t&&(s&&(l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),k[a]=[w,t]),m===b))break;return t-=e,t===d||t%d===0&&t/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||ga.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ia(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=I(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ia(function(a){var b=[],c=[],d=h(a.replace(P,"$1"));return d[u]?ia(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ia(function(a){return function(b){return ga(a,b).length>0}}),contains:ia(function(a){return a=a.replace(_,aa),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ia(function(a){return U.test(a||"")||ga.error("unsupported lang: "+a),a=a.replace(_,aa).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:oa(!1),disabled:oa(!0),checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return 
X.test(a.nodeName)},input:function(a){return W.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:pa(function(){return[0]}),last:pa(function(a,b){return[b-1]}),eq:pa(function(a,b,c){return[c<0?c+b:c]}),even:pa(function(a,b){for(var c=0;c=0;)a.push(d);return a}),gt:pa(function(a,b,c){for(var d=c<0?c+b:c;++d1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function va(a,b,c){for(var d=0,e=b.length;d-1&&(f[j]=!(g[j]=l))}}else r=wa(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):G.apply(g,r)})}function ya(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=ta(function(a){return a===b},h,!0),l=ta(function(a){return I(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];i1&&ua(m),i>1&&sa(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(P,"$1"),c,i0,e=a.length>0,f=function(f,g,h,i,k){var l,o,q,r=0,s="0",t=f&&[],u=[],v=j,x=f||e&&d.find.TAG("*",k),y=w+=null==v?1:Math.random()||.1,z=x.length;for(k&&(j=g===n||g||k);s!==z&&null!=(l=x[s]);s++){if(e&&l){o=0,g||l.ownerDocument===n||(m(l),h=!p);while(q=a[o++])if(q(l,g||n,h)){i.push(l);break}k&&(w=y)}c&&((l=!q&&l)&&r--,f&&t.push(l))}if(r+=s,c&&s!==r){o=0;while(q=b[o++])q(t,u,g,h);if(f){if(r>0)while(s--)t[s]||u[s]||(u[s]=E.call(i));u=wa(u)}G.apply(i,u),k&&!f&&u.length>0&&r+b.length>1&&ga.uniqueSort(i)}return k&&(w=y,j=v),t};return c?ia(f):f}return h=ga.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=ya(b[c]),f[u]?d.push(f):e.push(f);f=A(a,za(e,d)),f.selector=a}return f},i=ga.select=function(a,b,c,e){var f,i,j,k,l,m="function"==typeof 
a&&a,n=!e&&g(a=m.selector||a);if(c=c||[],1===n.length){if(i=n[0]=n[0].slice(0),i.length>2&&"ID"===(j=i[0]).type&&9===b.nodeType&&p&&d.relative[i[1].type]){if(b=(d.find.ID(j.matches[0].replace(_,aa),b)||[])[0],!b)return c;m&&(b=b.parentNode),a=a.slice(i.shift().value.length)}f=V.needsContext.test(a)?0:i.length;while(f--){if(j=i[f],d.relative[k=j.type])break;if((l=d.find[k])&&(e=l(j.matches[0].replace(_,aa),$.test(i[0].type)&&qa(b.parentNode)||b))){if(i.splice(f,1),a=e.length&&sa(i),!a)return G.apply(c,e),c;break}}}return(m||h(a,n))(e,b,!p,c,!b||$.test(a)&&qa(b.parentNode)||b),c},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ja(function(a){return 1&a.compareDocumentPosition(n.createElement("fieldset"))}),ja(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||ka("type|href|height|width",function(a,b,c){if(!c)return a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ja(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ka("value",function(a,b,c){if(!c&&"input"===a.nodeName.toLowerCase())return a.defaultValue}),ja(function(a){return null==a.getAttribute("disabled")})||ka(J,function(a,b,c){var d;if(!c)return a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),ga}(a);r.find=x,r.expr=x.selectors,r.expr[":"]=r.expr.pseudos,r.uniqueSort=r.unique=x.uniqueSort,r.text=x.getText,r.isXMLDoc=x.isXML,r.contains=x.contains,r.escapeSelector=x.escape;var y=function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&r(a).is(c))break;d.push(a)}return d},z=function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c},A=r.expr.match.needsContext;function B(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()}var C=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i,D=/^.[^:#\[\.,]*$/;function E(a,b,c){return 
r.isFunction(b)?r.grep(a,function(a,d){return!!b.call(a,d,a)!==c}):b.nodeType?r.grep(a,function(a){return a===b!==c}):"string"!=typeof b?r.grep(a,function(a){return i.call(b,a)>-1!==c}):D.test(b)?r.filter(b,a,c):(b=r.filter(b,a),r.grep(a,function(a){return i.call(b,a)>-1!==c&&1===a.nodeType}))}r.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?r.find.matchesSelector(d,a)?[d]:[]:r.find.matches(a,r.grep(b,function(a){return 1===a.nodeType}))},r.fn.extend({find:function(a){var b,c,d=this.length,e=this;if("string"!=typeof a)return this.pushStack(r(a).filter(function(){for(b=0;b1?r.uniqueSort(c):c},filter:function(a){return this.pushStack(E(this,a||[],!1))},not:function(a){return this.pushStack(E(this,a||[],!0))},is:function(a){return!!E(this,"string"==typeof a&&A.test(a)?r(a):a||[],!1).length}});var F,G=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/,H=r.fn.init=function(a,b,c){var e,f;if(!a)return this;if(c=c||F,"string"==typeof a){if(e="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:G.exec(a),!e||!e[1]&&b)return!b||b.jquery?(b||c).find(a):this.constructor(b).find(a);if(e[1]){if(b=b instanceof r?b[0]:b,r.merge(this,r.parseHTML(e[1],b&&b.nodeType?b.ownerDocument||b:d,!0)),C.test(e[1])&&r.isPlainObject(b))for(e in b)r.isFunction(this[e])?this[e](b[e]):this.attr(e,b[e]);return this}return f=d.getElementById(e[2]),f&&(this[0]=f,this.length=1),this}return a.nodeType?(this[0]=a,this.length=1,this):r.isFunction(a)?void 0!==c.ready?c.ready(a):a(r):r.makeArray(a,this)};H.prototype=r.fn,F=r(d);var I=/^(?:parents|prev(?:Until|All))/,J={children:!0,contents:!0,next:!0,prev:!0};r.fn.extend({has:function(a){var b=r(a,this),c=b.length;return this.filter(function(){for(var a=0;a-1:1===c.nodeType&&r.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?r.uniqueSort(f):f)},index:function(a){return a?"string"==typeof 
a?i.call(r(a),this[0]):i.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(r.uniqueSort(r.merge(this.get(),r(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function K(a,b){while((a=a[b])&&1!==a.nodeType);return a}r.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return y(a,"parentNode")},parentsUntil:function(a,b,c){return y(a,"parentNode",c)},next:function(a){return K(a,"nextSibling")},prev:function(a){return K(a,"previousSibling")},nextAll:function(a){return y(a,"nextSibling")},prevAll:function(a){return y(a,"previousSibling")},nextUntil:function(a,b,c){return y(a,"nextSibling",c)},prevUntil:function(a,b,c){return y(a,"previousSibling",c)},siblings:function(a){return z((a.parentNode||{}).firstChild,a)},children:function(a){return z(a.firstChild)},contents:function(a){return B(a,"iframe")?a.contentDocument:(B(a,"template")&&(a=a.content||a),r.merge([],a.childNodes))}},function(a,b){r.fn[a]=function(c,d){var e=r.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=r.filter(d,e)),this.length>1&&(J[a]||r.uniqueSort(e),I.test(a)&&e.reverse()),this.pushStack(e)}});var L=/[^\x20\t\r\n\f]+/g;function M(a){var b={};return r.each(a.match(L)||[],function(a,c){b[c]=!0}),b}r.Callbacks=function(a){a="string"==typeof a?M(a):r.extend({},a);var b,c,d,e,f=[],g=[],h=-1,i=function(){for(e=e||a.once,d=b=!0;g.length;h=-1){c=g.shift();while(++h-1)f.splice(c,1),c<=h&&h--}),this},has:function(a){return a?r.inArray(a,f)>-1:f.length>0},empty:function(){return f&&(f=[]),this},disable:function(){return e=g=[],f=c="",this},disabled:function(){return!f},lock:function(){return e=g=[],c||b||(f=c=""),this},locked:function(){return!!e},fireWith:function(a,c){return e||(c=c||[],c=[a,c.slice?c.slice():c],g.push(c),b||i()),this},fire:function(){return 
j.fireWith(this,arguments),this},fired:function(){return!!d}};return j};function N(a){return a}function O(a){throw a}function P(a,b,c,d){var e;try{a&&r.isFunction(e=a.promise)?e.call(a).done(b).fail(c):a&&r.isFunction(e=a.then)?e.call(a,b,c):b.apply(void 0,[a].slice(d))}catch(a){c.apply(void 0,[a])}}r.extend({Deferred:function(b){var c=[["notify","progress",r.Callbacks("memory"),r.Callbacks("memory"),2],["resolve","done",r.Callbacks("once memory"),r.Callbacks("once memory"),0,"resolved"],["reject","fail",r.Callbacks("once memory"),r.Callbacks("once memory"),1,"rejected"]],d="pending",e={state:function(){return d},always:function(){return f.done(arguments).fail(arguments),this},"catch":function(a){return e.then(null,a)},pipe:function(){var a=arguments;return r.Deferred(function(b){r.each(c,function(c,d){var e=r.isFunction(a[d[4]])&&a[d[4]];f[d[1]](function(){var a=e&&e.apply(this,arguments);a&&r.isFunction(a.promise)?a.promise().progress(b.notify).done(b.resolve).fail(b.reject):b[d[0]+"With"](this,e?[a]:arguments)})}),a=null}).promise()},then:function(b,d,e){var f=0;function g(b,c,d,e){return function(){var h=this,i=arguments,j=function(){var a,j;if(!(b=f&&(d!==O&&(h=void 0,i=[a]),c.rejectWith(h,i))}};b?k():(r.Deferred.getStackHook&&(k.stackTrace=r.Deferred.getStackHook()),a.setTimeout(k))}}return r.Deferred(function(a){c[0][3].add(g(0,a,r.isFunction(e)?e:N,a.notifyWith)),c[1][3].add(g(0,a,r.isFunction(b)?b:N)),c[2][3].add(g(0,a,r.isFunction(d)?d:O))}).promise()},promise:function(a){return null!=a?r.extend(a,e):e}},f={};return r.each(c,function(a,b){var g=b[2],h=b[5];e[b[1]]=g.add,h&&g.add(function(){d=h},c[3-a][2].disable,c[0][2].lock),g.add(b[3].fire),f[b[0]]=function(){return f[b[0]+"With"](this===f?void 0:this,arguments),this},f[b[0]+"With"]=g.fireWith}),e.promise(f),b&&b.call(f,f),f},when:function(a){var b=arguments.length,c=b,d=Array(c),e=f.call(arguments),g=r.Deferred(),h=function(a){return 
function(c){d[a]=this,e[a]=arguments.length>1?f.call(arguments):c,--b||g.resolveWith(d,e)}};if(b<=1&&(P(a,g.done(h(c)).resolve,g.reject,!b),"pending"===g.state()||r.isFunction(e[c]&&e[c].then)))return g.then();while(c--)P(e[c],h(c),g.reject);return g.promise()}});var Q=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;r.Deferred.exceptionHook=function(b,c){a.console&&a.console.warn&&b&&Q.test(b.name)&&a.console.warn("jQuery.Deferred exception: "+b.message,b.stack,c)},r.readyException=function(b){a.setTimeout(function(){throw b})};var R=r.Deferred();r.fn.ready=function(a){return R.then(a)["catch"](function(a){r.readyException(a)}),this},r.extend({isReady:!1,readyWait:1,ready:function(a){(a===!0?--r.readyWait:r.isReady)||(r.isReady=!0,a!==!0&&--r.readyWait>0||R.resolveWith(d,[r]))}}),r.ready.then=R.then;function S(){d.removeEventListener("DOMContentLoaded",S), -a.removeEventListener("load",S),r.ready()}"complete"===d.readyState||"loading"!==d.readyState&&!d.documentElement.doScroll?a.setTimeout(r.ready):(d.addEventListener("DOMContentLoaded",S),a.addEventListener("load",S));var T=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===r.type(c)){e=!0;for(h in c)T(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,r.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(r(a),c)})),b))for(;h1,null,!0)},removeData:function(a){return this.each(function(){X.remove(this,a)})}}),r.extend({queue:function(a,b,c){var d;if(a)return b=(b||"fx")+"queue",d=W.get(a,b),c&&(!d||Array.isArray(c)?d=W.access(a,b,r.makeArray(c)):d.push(c)),d||[]},dequeue:function(a,b){b=b||"fx";var c=r.queue(a,b),d=c.length,e=c.shift(),f=r._queueHooks(a,b),g=function(){r.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return W.get(a,c)||W.access(a,c,{empty:r.Callbacks("once 
memory").add(function(){W.remove(a,[b+"queue",c])})})}}),r.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length\x20\t\r\n\f]+)/i,la=/^$|\/(?:java|ecma)script/i,ma={option:[1,""],thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};ma.optgroup=ma.option,ma.tbody=ma.tfoot=ma.colgroup=ma.caption=ma.thead,ma.th=ma.td;function na(a,b){var c;return c="undefined"!=typeof a.getElementsByTagName?a.getElementsByTagName(b||"*"):"undefined"!=typeof a.querySelectorAll?a.querySelectorAll(b||"*"):[],void 0===b||b&&B(a,b)?r.merge([a],c):c}function oa(a,b){for(var c=0,d=a.length;c-1)e&&e.push(f);else if(j=r.contains(f.ownerDocument,f),g=na(l.appendChild(f),"script"),j&&oa(g),c){k=0;while(f=g[k++])la.test(f.type||"")&&c.push(f)}return l}!function(){var a=d.createDocumentFragment(),b=a.appendChild(d.createElement("div")),c=d.createElement("input");c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),b.appendChild(c),o.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="",o.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var ra=d.documentElement,sa=/^key/,ta=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,ua=/^([^.]*)(?:\.(.+)|)/;function va(){return!0}function wa(){return!1}function xa(){try{return d.activeElement}catch(a){}}function ya(a,b,c,d,e,f){var g,h;if("object"==typeof b){"string"!=typeof c&&(d=d||c,c=void 0);for(h in b)ya(a,h,c,d,b[h],f);return a}if(null==d&&null==e?(e=c,d=c=void 0):null==e&&("string"==typeof c?(e=d,d=void 0):(e=d,d=c,c=void 0)),e===!1)e=wa;else if(!e)return a;return 1===f&&(g=e,e=function(a){return r().off(a),g.apply(this,arguments)},e.guid=g.guid||(g.guid=r.guid++)),a.each(function(){r.event.add(this,b,e,d,c)})}r.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,n,o,p,q=W.get(a);if(q){c.handler&&(f=c,c=f.handler,e=f.selector),e&&r.find.matchesSelector(ra,e),c.guid||(c.guid=r.guid++),(i=q.events)||(i=q.events={}),(g=q.handle)||(g=q.handle=function(b){return"undefined"!=typeof r&&r.event.triggered!==b.type?r.event.dispatch.apply(a,arguments):void 
0}),b=(b||"").match(L)||[""],j=b.length;while(j--)h=ua.exec(b[j])||[],n=p=h[1],o=(h[2]||"").split(".").sort(),n&&(l=r.event.special[n]||{},n=(e?l.delegateType:l.bindType)||n,l=r.event.special[n]||{},k=r.extend({type:n,origType:p,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&r.expr.match.needsContext.test(e),namespace:o.join(".")},f),(m=i[n])||(m=i[n]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,o,g)!==!1||a.addEventListener&&a.addEventListener(n,g)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),r.event.global[n]=!0)}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,n,o,p,q=W.hasData(a)&&W.get(a);if(q&&(i=q.events)){b=(b||"").match(L)||[""],j=b.length;while(j--)if(h=ua.exec(b[j])||[],n=p=h[1],o=(h[2]||"").split(".").sort(),n){l=r.event.special[n]||{},n=(d?l.delegateType:l.bindType)||n,m=i[n]||[],h=h[2]&&new RegExp("(^|\\.)"+o.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;while(f--)k=m[f],!e&&p!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,o,q.handle)!==!1||r.removeEvent(a,n,q.handle),delete i[n])}else for(n in i)r.event.remove(a,n+b[j],c,d,!0);r.isEmptyObject(i)&&W.remove(a,"handle events")}},dispatch:function(a){var b=r.event.fix(a),c,d,e,f,g,h,i=new Array(arguments.length),j=(W.get(this,"events")||{})[b.type]||[],k=r.event.special[b.type]||{};for(i[0]=b,c=1;c=1))for(;j!==this;j=j.parentNode||this)if(1===j.nodeType&&("click"!==a.type||j.disabled!==!0)){for(f=[],g={},c=0;c-1:r.find(e,this,null,[j]).length),g[e]&&f.push(d);f.length&&h.push({elem:j,handlers:f})}return j=this,i\x20\t\r\n\f]*)[^>]*)\/>/gi,Aa=/\s*$/g;function Ea(a,b){return B(a,"table")&&B(11!==b.nodeType?b:b.firstChild,"tr")?r(">tbody",a)[0]||a:a}function Fa(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function Ga(a){var 
b=Ca.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function Ha(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(W.hasData(a)&&(f=W.access(a),g=W.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;c1&&"string"==typeof q&&!o.checkClone&&Ba.test(q))return a.each(function(e){var f=a.eq(e);s&&(b[0]=q.call(this,e,f.html())),Ja(f,b,c,d)});if(m&&(e=qa(b,a[0].ownerDocument,!1,a,d),f=e.firstChild,1===e.childNodes.length&&(e=f),f||d)){for(h=r.map(na(e,"script"),Fa),i=h.length;l")},clone:function(a,b,c){var d,e,f,g,h=a.cloneNode(!0),i=r.contains(a.ownerDocument,a);if(!(o.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||r.isXMLDoc(a)))for(g=na(h),f=na(a),d=0,e=f.length;d0&&oa(g,!i&&na(a,"script")),h},cleanData:function(a){for(var b,c,d,e=r.event.special,f=0;void 0!==(c=a[f]);f++)if(U(c)){if(b=c[W.expando]){if(b.events)for(d in b.events)e[d]?r.event.remove(c,d):r.removeEvent(c,d,b.handle);c[W.expando]=void 0}c[X.expando]&&(c[X.expando]=void 0)}}}),r.fn.extend({detach:function(a){return Ka(this,a,!0)},remove:function(a){return Ka(this,a)},text:function(a){return T(this,function(a){return void 0===a?r.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=a)})},null,a,arguments.length)},append:function(){return Ja(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=Ea(this,a);b.appendChild(a)}})},prepend:function(){return Ja(this,arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=Ea(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return Ja(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return Ja(this,arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(r.cleanData(na(a,!1)),a.textContent="");return 
this},clone:function(a,b){return a=null!=a&&a,b=null==b?a:b,this.map(function(){return r.clone(this,a,b)})},html:function(a){return T(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return b.innerHTML;if("string"==typeof a&&!Aa.test(a)&&!ma[(ka.exec(a)||["",""])[1].toLowerCase()]){a=r.htmlPrefilter(a);try{for(;c1)}});function _a(a,b,c,d,e){return new _a.prototype.init(a,b,c,d,e)}r.Tween=_a,_a.prototype={constructor:_a,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||r.easing._default,this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(r.cssNumber[c]?"":"px")},cur:function(){var a=_a.propHooks[this.prop];return a&&a.get?a.get(this):_a.propHooks._default.get(this)},run:function(a){var b,c=_a.propHooks[this.prop];return this.options.duration?this.pos=b=r.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):this.pos=b=a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):_a.propHooks._default.set(this),this}},_a.prototype.init.prototype=_a.prototype,_a.propHooks={_default:{get:function(a){var b;return 1!==a.elem.nodeType||null!=a.elem[a.prop]&&null==a.elem.style[a.prop]?a.elem[a.prop]:(b=r.css(a.elem,a.prop,""),b&&"auto"!==b?b:0)},set:function(a){r.fx.step[a.prop]?r.fx.step[a.prop](a):1!==a.elem.nodeType||null==a.elem.style[r.cssProps[a.prop]]&&!r.cssHooks[a.prop]?a.elem[a.prop]=a.now:r.style(a.elem,a.prop,a.now+a.unit)}}},_a.propHooks.scrollTop=_a.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},r.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2},_default:"swing"},r.fx=_a.prototype.init,r.fx.step={};var ab,bb,cb=/^(?:toggle|show|hide)$/,db=/queueHooks$/;function eb(){bb&&(d.hidden===!1&&a.requestAnimationFrame?a.requestAnimationFrame(eb):a.setTimeout(eb,r.fx.interval),r.fx.tick())}function fb(){return 
a.setTimeout(function(){ab=void 0}),ab=r.now()}function gb(a,b){var c,d=0,e={height:a};for(b=b?1:0;d<4;d+=2-b)c=ca[d],e["margin"+c]=e["padding"+c]=a;return b&&(e.opacity=e.width=a),e}function hb(a,b,c){for(var d,e=(kb.tweeners[b]||[]).concat(kb.tweeners["*"]),f=0,g=e.length;f1)},removeAttr:function(a){return this.each(function(){r.removeAttr(this,a)})}}),r.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return"undefined"==typeof a.getAttribute?r.prop(a,b,c):(1===f&&r.isXMLDoc(a)||(e=r.attrHooks[b.toLowerCase()]||(r.expr.match.bool.test(b)?lb:void 0)),void 0!==c?null===c?void r.removeAttr(a,b):e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:(a.setAttribute(b,c+""),c):e&&"get"in e&&null!==(d=e.get(a,b))?d:(d=r.find.attr(a,b),null==d?void 0:d)); -},attrHooks:{type:{set:function(a,b){if(!o.radioValue&&"radio"===b&&B(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}},removeAttr:function(a,b){var c,d=0,e=b&&b.match(L);if(e&&1===a.nodeType)while(c=e[d++])a.removeAttribute(c)}}),lb={set:function(a,b,c){return b===!1?r.removeAttr(a,c):a.setAttribute(c,c),c}},r.each(r.expr.match.bool.source.match(/\w+/g),function(a,b){var c=mb[b]||r.find.attr;mb[b]=function(a,b,d){var e,f,g=b.toLowerCase();return d||(f=mb[g],mb[g]=e,e=null!=c(a,b,d)?g:null,mb[g]=f),e}});var nb=/^(?:input|select|textarea|button)$/i,ob=/^(?:a|area)$/i;r.fn.extend({prop:function(a,b){return T(this,r.prop,a,b,arguments.length>1)},removeProp:function(a){return this.each(function(){delete this[r.propFix[a]||a]})}}),r.extend({prop:function(a,b,c){var d,e,f=a.nodeType;if(3!==f&&8!==f&&2!==f)return 1===f&&r.isXMLDoc(a)||(b=r.propFix[b]||b,e=r.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){var b=r.find.attr(a,"tabindex");return 
b?parseInt(b,10):nb.test(a.nodeName)||ob.test(a.nodeName)&&a.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),o.optSelected||(r.propHooks.selected={get:function(a){var b=a.parentNode;return b&&b.parentNode&&b.parentNode.selectedIndex,null},set:function(a){var b=a.parentNode;b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex)}}),r.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){r.propFix[this.toLowerCase()]=this});function pb(a){var b=a.match(L)||[];return b.join(" ")}function qb(a){return a.getAttribute&&a.getAttribute("class")||""}r.fn.extend({addClass:function(a){var b,c,d,e,f,g,h,i=0;if(r.isFunction(a))return this.each(function(b){r(this).addClass(a.call(this,b,qb(this)))});if("string"==typeof a&&a){b=a.match(L)||[];while(c=this[i++])if(e=qb(c),d=1===c.nodeType&&" "+pb(e)+" "){g=0;while(f=b[g++])d.indexOf(" "+f+" ")<0&&(d+=f+" ");h=pb(d),e!==h&&c.setAttribute("class",h)}}return this},removeClass:function(a){var b,c,d,e,f,g,h,i=0;if(r.isFunction(a))return this.each(function(b){r(this).removeClass(a.call(this,b,qb(this)))});if(!arguments.length)return this.attr("class","");if("string"==typeof a&&a){b=a.match(L)||[];while(c=this[i++])if(e=qb(c),d=1===c.nodeType&&" "+pb(e)+" "){g=0;while(f=b[g++])while(d.indexOf(" "+f+" ")>-1)d=d.replace(" "+f+" "," ");h=pb(d),e!==h&&c.setAttribute("class",h)}}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):r.isFunction(a)?this.each(function(c){r(this).toggleClass(a.call(this,c,qb(this),b),b)}):this.each(function(){var b,d,e,f;if("string"===c){d=0,e=r(this),f=a.match(L)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else void 0!==a&&"boolean"!==c||(b=qb(this),b&&W.set(this,"__className__",b),this.setAttribute&&this.setAttribute("class",b||a===!1?"":W.get(this,"__className__")||""))})},hasClass:function(a){var 
b,c,d=0;b=" "+a+" ";while(c=this[d++])if(1===c.nodeType&&(" "+pb(qb(c))+" ").indexOf(b)>-1)return!0;return!1}});var rb=/\r/g;r.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=r.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,r(this).val()):a,null==e?e="":"number"==typeof e?e+="":Array.isArray(e)&&(e=r.map(e,function(a){return null==a?"":a+""})),b=r.valHooks[this.type]||r.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=r.valHooks[e.type]||r.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(rb,""):null==c?"":c)}}}),r.extend({valHooks:{option:{get:function(a){var b=r.find.attr(a,"value");return null!=b?b:pb(r.text(a))}},select:{get:function(a){var b,c,d,e=a.options,f=a.selectedIndex,g="select-one"===a.type,h=g?null:[],i=g?f+1:e.length;for(d=f<0?i:g?f:0;d-1)&&(c=!0);return c||(a.selectedIndex=-1),f}}}}),r.each(["radio","checkbox"],function(){r.valHooks[this]={set:function(a,b){if(Array.isArray(b))return a.checked=r.inArray(r(a).val(),b)>-1}},o.checkOn||(r.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})});var sb=/^(?:focusinfocus|focusoutblur)$/;r.extend(r.event,{trigger:function(b,c,e,f){var g,h,i,j,k,m,n,o=[e||d],p=l.call(b,"type")?b.type:b,q=l.call(b,"namespace")?b.namespace.split("."):[];if(h=i=e=e||d,3!==e.nodeType&&8!==e.nodeType&&!sb.test(p+r.event.triggered)&&(p.indexOf(".")>-1&&(q=p.split("."),p=q.shift(),q.sort()),k=p.indexOf(":")<0&&"on"+p,b=b[r.expando]?b:new r.Event(p,"object"==typeof b&&b),b.isTrigger=f?2:3,b.namespace=q.join("."),b.rnamespace=b.namespace?new RegExp("(^|\\.)"+q.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 
0,b.target||(b.target=e),c=null==c?[b]:r.makeArray(c,[b]),n=r.event.special[p]||{},f||!n.trigger||n.trigger.apply(e,c)!==!1)){if(!f&&!n.noBubble&&!r.isWindow(e)){for(j=n.delegateType||p,sb.test(j+p)||(h=h.parentNode);h;h=h.parentNode)o.push(h),i=h;i===(e.ownerDocument||d)&&o.push(i.defaultView||i.parentWindow||a)}g=0;while((h=o[g++])&&!b.isPropagationStopped())b.type=g>1?j:n.bindType||p,m=(W.get(h,"events")||{})[b.type]&&W.get(h,"handle"),m&&m.apply(h,c),m=k&&h[k],m&&m.apply&&U(h)&&(b.result=m.apply(h,c),b.result===!1&&b.preventDefault());return b.type=p,f||b.isDefaultPrevented()||n._default&&n._default.apply(o.pop(),c)!==!1||!U(e)||k&&r.isFunction(e[p])&&!r.isWindow(e)&&(i=e[k],i&&(e[k]=null),r.event.triggered=p,e[p](),r.event.triggered=void 0,i&&(e[k]=i)),b.result}},simulate:function(a,b,c){var d=r.extend(new r.Event,c,{type:a,isSimulated:!0});r.event.trigger(d,null,b)}}),r.fn.extend({trigger:function(a,b){return this.each(function(){r.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];if(c)return r.event.trigger(a,b,c,!0)}}),r.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(a,b){r.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),r.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}}),o.focusin="onfocusin"in a,o.focusin||r.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){r.event.simulate(b,a.target,r.event.fix(a))};r.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=W.access(d,b);e||d.addEventListener(a,c,!0),W.access(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=W.access(d,b)-1;e?W.access(d,b,e):(d.removeEventListener(a,c,!0),W.remove(d,b))}}});var tb=a.location,ub=r.now(),vb=/\?/;r.parseXML=function(b){var c;if(!b||"string"!=typeof b)return null;try{c=(new 
a.DOMParser).parseFromString(b,"text/xml")}catch(d){c=void 0}return c&&!c.getElementsByTagName("parsererror").length||r.error("Invalid XML: "+b),c};var wb=/\[\]$/,xb=/\r?\n/g,yb=/^(?:submit|button|image|reset|file)$/i,zb=/^(?:input|select|textarea|keygen)/i;function Ab(a,b,c,d){var e;if(Array.isArray(b))r.each(b,function(b,e){c||wb.test(a)?d(a,e):Ab(a+"["+("object"==typeof e&&null!=e?b:"")+"]",e,c,d)});else if(c||"object"!==r.type(b))d(a,b);else for(e in b)Ab(a+"["+e+"]",b[e],c,d)}r.param=function(a,b){var c,d=[],e=function(a,b){var c=r.isFunction(b)?b():b;d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(null==c?"":c)};if(Array.isArray(a)||a.jquery&&!r.isPlainObject(a))r.each(a,function(){e(this.name,this.value)});else for(c in a)Ab(c,a[c],b,e);return d.join("&")},r.fn.extend({serialize:function(){return r.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=r.prop(this,"elements");return a?r.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!r(this).is(":disabled")&&zb.test(this.nodeName)&&!yb.test(a)&&(this.checked||!ja.test(a))}).map(function(a,b){var c=r(this).val();return null==c?null:Array.isArray(c)?r.map(c,function(a){return{name:b.name,value:a.replace(xb,"\r\n")}}):{name:b.name,value:c.replace(xb,"\r\n")}}).get()}});var Bb=/%20/g,Cb=/#.*$/,Db=/([?&])_=[^&]*/,Eb=/^(.*?):[ \t]*([^\r\n]*)$/gm,Fb=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Gb=/^(?:GET|HEAD)$/,Hb=/^\/\//,Ib={},Jb={},Kb="*/".concat("*"),Lb=d.createElement("a");Lb.href=tb.href;function Mb(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(L)||[];if(r.isFunction(c))while(d=f[e++])"+"===d[0]?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function Nb(a,b,c,d){var e={},f=a===Jb;function g(h){var i;return e[h]=!0,r.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return 
g(b.dataTypes[0])||!e["*"]&&g("*")}function Ob(a,b){var c,d,e=r.ajaxSettings.flatOptions||{};for(c in b)void 0!==b[c]&&((e[c]?a:d||(d={}))[c]=b[c]);return d&&r.extend(!0,a,d),a}function Pb(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===d&&(d=a.mimeType||b.getResponseHeader("Content-Type"));if(d)for(e in h)if(h[e]&&h[e].test(d)){i.unshift(e);break}if(i[0]in c)f=i[0];else{for(e in c){if(!i[0]||a.converters[e+" "+i[0]]){f=e;break}g||(g=e)}f=f||g}if(f)return f!==i[0]&&i.unshift(f),c[f]}function Qb(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}r.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:tb.href,type:"GET",isLocal:Fb.test(tb.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":Kb,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":r.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?Ob(Ob(a,r.ajaxSettings),b):Ob(r.ajaxSettings,a)},ajaxPrefilter:Mb(Ib),ajaxTransport:Mb(Jb),ajax:function(b,c){"object"==typeof b&&(c=b,b=void 0),c=c||{};var 
e,f,g,h,i,j,k,l,m,n,o=r.ajaxSetup({},c),p=o.context||o,q=o.context&&(p.nodeType||p.jquery)?r(p):r.event,s=r.Deferred(),t=r.Callbacks("once memory"),u=o.statusCode||{},v={},w={},x="canceled",y={readyState:0,getResponseHeader:function(a){var b;if(k){if(!h){h={};while(b=Eb.exec(g))h[b[1].toLowerCase()]=b[2]}b=h[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return k?g:null},setRequestHeader:function(a,b){return null==k&&(a=w[a.toLowerCase()]=w[a.toLowerCase()]||a,v[a]=b),this},overrideMimeType:function(a){return null==k&&(o.mimeType=a),this},statusCode:function(a){var b;if(a)if(k)y.always(a[y.status]);else for(b in a)u[b]=[u[b],a[b]];return this},abort:function(a){var b=a||x;return e&&e.abort(b),A(0,b),this}};if(s.promise(y),o.url=((b||o.url||tb.href)+"").replace(Hb,tb.protocol+"//"),o.type=c.method||c.type||o.method||o.type,o.dataTypes=(o.dataType||"*").toLowerCase().match(L)||[""],null==o.crossDomain){j=d.createElement("a");try{j.href=o.url,j.href=j.href,o.crossDomain=Lb.protocol+"//"+Lb.host!=j.protocol+"//"+j.host}catch(z){o.crossDomain=!0}}if(o.data&&o.processData&&"string"!=typeof o.data&&(o.data=r.param(o.data,o.traditional)),Nb(Ib,o,c,y),k)return y;l=r.event&&o.global,l&&0===r.active++&&r.event.trigger("ajaxStart"),o.type=o.type.toUpperCase(),o.hasContent=!Gb.test(o.type),f=o.url.replace(Cb,""),o.hasContent?o.data&&o.processData&&0===(o.contentType||"").indexOf("application/x-www-form-urlencoded")&&(o.data=o.data.replace(Bb,"+")):(n=o.url.slice(f.length),o.data&&(f+=(vb.test(f)?"&":"?")+o.data,delete o.data),o.cache===!1&&(f=f.replace(Db,"$1"),n=(vb.test(f)?"&":"?")+"_="+ub++ 
+n),o.url=f+n),o.ifModified&&(r.lastModified[f]&&y.setRequestHeader("If-Modified-Since",r.lastModified[f]),r.etag[f]&&y.setRequestHeader("If-None-Match",r.etag[f])),(o.data&&o.hasContent&&o.contentType!==!1||c.contentType)&&y.setRequestHeader("Content-Type",o.contentType),y.setRequestHeader("Accept",o.dataTypes[0]&&o.accepts[o.dataTypes[0]]?o.accepts[o.dataTypes[0]]+("*"!==o.dataTypes[0]?", "+Kb+"; q=0.01":""):o.accepts["*"]);for(m in o.headers)y.setRequestHeader(m,o.headers[m]);if(o.beforeSend&&(o.beforeSend.call(p,y,o)===!1||k))return y.abort();if(x="abort",t.add(o.complete),y.done(o.success),y.fail(o.error),e=Nb(Jb,o,c,y)){if(y.readyState=1,l&&q.trigger("ajaxSend",[y,o]),k)return y;o.async&&o.timeout>0&&(i=a.setTimeout(function(){y.abort("timeout")},o.timeout));try{k=!1,e.send(v,A)}catch(z){if(k)throw z;A(-1,z)}}else A(-1,"No Transport");function A(b,c,d,h){var j,m,n,v,w,x=c;k||(k=!0,i&&a.clearTimeout(i),e=void 0,g=h||"",y.readyState=b>0?4:0,j=b>=200&&b<300||304===b,d&&(v=Pb(o,y,d)),v=Qb(o,v,y,j),j?(o.ifModified&&(w=y.getResponseHeader("Last-Modified"),w&&(r.lastModified[f]=w),w=y.getResponseHeader("etag"),w&&(r.etag[f]=w)),204===b||"HEAD"===o.type?x="nocontent":304===b?x="notmodified":(x=v.state,m=v.data,n=v.error,j=!n)):(n=x,!b&&x||(x="error",b<0&&(b=0))),y.status=b,y.statusText=(c||x)+"",j?s.resolveWith(p,[m,x,y]):s.rejectWith(p,[y,x,n]),y.statusCode(u),u=void 0,l&&q.trigger(j?"ajaxSuccess":"ajaxError",[y,o,j?m:n]),t.fireWith(p,[y,x]),l&&(q.trigger("ajaxComplete",[y,o]),--r.active||r.event.trigger("ajaxStop")))}return y},getJSON:function(a,b,c){return r.get(a,b,c,"json")},getScript:function(a,b){return r.get(a,void 0,b,"script")}}),r.each(["get","post"],function(a,b){r[b]=function(a,c,d,e){return r.isFunction(c)&&(e=e||d,d=c,c=void 0),r.ajax(r.extend({url:a,type:b,dataType:e,data:c,success:d},r.isPlainObject(a)&&a))}}),r._evalUrl=function(a){return 
r.ajax({url:a,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,"throws":!0})},r.fn.extend({wrapAll:function(a){var b;return this[0]&&(r.isFunction(a)&&(a=a.call(this[0])),b=r(a,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstElementChild)a=a.firstElementChild;return a}).append(this)),this},wrapInner:function(a){return r.isFunction(a)?this.each(function(b){r(this).wrapInner(a.call(this,b))}):this.each(function(){var b=r(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=r.isFunction(a);return this.each(function(c){r(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(a){return this.parent(a).not("body").each(function(){r(this).replaceWith(this.childNodes)}),this}}),r.expr.pseudos.hidden=function(a){return!r.expr.pseudos.visible(a)},r.expr.pseudos.visible=function(a){return!!(a.offsetWidth||a.offsetHeight||a.getClientRects().length)},r.ajaxSettings.xhr=function(){try{return new a.XMLHttpRequest}catch(b){}};var Rb={0:200,1223:204},Sb=r.ajaxSettings.xhr();o.cors=!!Sb&&"withCredentials"in Sb,o.ajax=Sb=!!Sb,r.ajaxTransport(function(b){var c,d;if(o.cors||Sb&&!b.crossDomain)return{send:function(e,f){var g,h=b.xhr();if(h.open(b.type,b.url,b.async,b.username,b.password),b.xhrFields)for(g in b.xhrFields)h[g]=b.xhrFields[g];b.mimeType&&h.overrideMimeType&&h.overrideMimeType(b.mimeType),b.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest");for(g in e)h.setRequestHeader(g,e[g]);c=function(a){return function(){c&&(c=d=h.onload=h.onerror=h.onabort=h.onreadystatechange=null,"abort"===a?h.abort():"error"===a?"number"!=typeof h.status?f(0,"error"):f(h.status,h.statusText):f(Rb[h.status]||h.status,h.statusText,"text"!==(h.responseType||"text")||"string"!=typeof h.responseText?{binary:h.response}:{text:h.responseText},h.getAllResponseHeaders()))}},h.onload=c(),d=h.onerror=c("error"),void 
0!==h.onabort?h.onabort=d:h.onreadystatechange=function(){4===h.readyState&&a.setTimeout(function(){c&&d()})},c=c("abort");try{h.send(b.hasContent&&b.data||null)}catch(i){if(c)throw i}},abort:function(){c&&c()}}}),r.ajaxPrefilter(function(a){a.crossDomain&&(a.contents.script=!1)}),r.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(a){return r.globalEval(a),a}}}),r.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET")}),r.ajaxTransport("script",function(a){if(a.crossDomain){var b,c;return{send:function(e,f){b=r(" - + diff --git a/monocle/templates/report.html b/monocle/templates/report.html index 8c1173696..2d398b210 100644 --- a/monocle/templates/report.html +++ b/monocle/templates/report.html @@ -11,7 +11,7 @@ } - + + + - -
- - - {{ social_links }} - -
- - - -
-
Filters Settings
-
Panel Content
+ +
+ + + {{ social_links }} +
+ + +
+
+ Filters Settings +
+
+ Panel Content +
+
+
+
+ More Settings +
+
+
Fixed markers opacity
+
+ +
-
-
More Settings
-
-
Fixed markers opacity
-
- - -
-
-
Show timer under markers
-
- - -
-
-
Reset all preferences
- -
+
+
Show timer under markers
+
+ +
+
+
Reset all preferences
+ +
- - - - - - - - - {{ extra_css_js }} +
+ + + + + + {{ extra_css_js }} diff --git a/monocle/templates/report.html b/monocle/templates/report.html index 2d398b210..d8175285c 100644 --- a/monocle/templates/report.html +++ b/monocle/templates/report.html @@ -1,19 +1,23 @@ - - Monocle Report - {{ area_name }} - - - - - - - + + + -
-

Monocle Report

-

Generated on {{ current_date.strftime('%Y-%m-%d %H:%M:%S') }}

- -

Disclaimer: data may be incomplete due to various issues that might have happened (bugs, unstable servers, bugs on the servers etc.). If there is data about a sighting of a Pokemon, that spawn almost certainly happened. On the other hand, there is no guarantee that the database contains all spawns, so there may be Pokemon missing from this report. Your mileage may vary.

- -

This report contains statistics about data gathered from scanning {{ area_name }}.

- -

During that session, {{ total_spawn_count }} Pokemon have been seen on an area of about {{ area_size }} square km. Data gathering started on {{ session_start.strftime('%Y-%m-%d %H:%M:%S') }} and ended on {{ session_end.strftime('%Y-%m-%d %H:%M:%S') }}, lasting {{ session_length_hours }} hours. There were {{ spawns_per_hour }} spawns per hour on average.

- -

Below chart shows number of spawns seen per 5 minutes blocks:

- -
- {% if google_maps_key %} -

Heatmap

- -

All noticed spawn locations. The redder the point is, more Pokemon spawn there.

- -

(will slow down browser!)

- -
+
+

Monocle Report

+

Generated on {{ current_date.strftime('%Y-%m-%d %H:%M:%S') }}

+

Disclaimer: data may be incomplete due to various issues that might have happened (bugs, unstable + servers, bugs on the servers etc.). If there is data about a sighting of a Pokemon, that spawn almost certainly + happened. On the other hand, there is no guarantee that the database contains all spawns, so there may be + Pokemon missing from this report. Your mileage may vary.

+

This report contains statistics about data gathered from scanning {{ area_name }}.

+

During that session, {{ total_spawn_count }} Pokemon have been seen on an area of about {{ area_size + }} square km. Data gathering started on {{ session_start.strftime('%Y-%m-%d %H:%M:%S') }} and ended on + {{ session_end.strftime('%Y-%m-%d %H:%M:%S') }}, lasting {{ session_length_hours }} hours. There were + {{ spawns_per_hour }} spawns per hour on average.

+

Below chart shows number of spawns seen per 5 minutes blocks:

+
{% if google_maps_key %} +

Heatmap

+

All noticed spawn locations. The redder the point is, more Pokemon spawn there.

+

(will slow down browser!)

+
{% endif %} +

Most & least frequently spawning species

+

Top 30 that spawned the most number of times during above period:

+
+ {% for icon in icons.top30 %} + + {% if loop.index > 0 and loop.index % 10 == 0 %} +
{% endif %} -

Most & least frequently spawning species

- -

Top 30 that spawned the most number of times during above period:

- -
- {% for icon in icons.top30 %} - - {% if loop.index > 0 and loop.index % 10 == 0 %} -
- {% endif %} - {% endfor %} -
- -
- -

Bottom 30 that spawned the least number of times during above period, and all of their spawning places:

- -
- {% for icon in icons.bottom30 %} - - {% if loop.index > 0 and loop.index % 10 == 0 %} -
- {% endif %} - {% endfor %} -
- -
- -

Evolutions and rare Pokemon

- -

Stage 2 evolutions and Pokemon subjectively considered "rare" by author of this report, together with their spawning places:

- -
- {% for icon in icons.rare %} - - {% if loop.index > 0 and loop.index % 10 == 0 %} -
- {% endif %} - {% endfor %} -
- -
- {% if google_maps_key %} -
+ {% endfor %} +
+
+

Bottom 30 that spawned the least number of times during above period, and all of their spawning + places:

+
+ {% for icon in icons.bottom30 %} + + {% if loop.index > 0 and loop.index % 10 == 0 %} +
{% endif %} - -

Nonexistent species

- -

Those Pokemon didn't spawn during data gathering period:

- -
- {% for icon in icons.nonexistent %} - - {% if loop.index > 0 and loop.index % 10 == 0 %} -
- {% endif %} - {% endfor %} -
- -

Footnotes

- -

This report was generated using Monocle, a tool for gathering data about Pokemon Go.

- -

Check out Monocle on GitHub for more info.

- -

This report is available under Creative Commons CC-BY-4.0 license: https://creativecommons.org/licenses/by/4.0/.

+ {% endfor %} +
+
+

Evolutions and rare Pokemon

+

Stage 2 evolutions and Pokemon subjectively considered "rare" by author of this report, together with + their spawning places:

+
+ {% for icon in icons.rare %} + + {% ifloop.index > 0 and loop.index % 10 == 0 %} +
+ {% endif %} + {% endfor %} +
+
+ {% if google_maps_key %} +
+ {% endif %} +

Nonexistent species

+

Those Pokemon didn't spawn during data gathering period:

+
+ {% for icon in icons.nonexistent %} + + {% if loop.index > 0 and loop.index % 10 == 0 %} +
+ {% endif %} + {% endfor %}
- +

Footnotes

+

This report was generated using Monocle, a tool for gathering data about Pokemon Go.

+

Check out Monocle on GitHub for more info.

+

This report is available under Creative Commons CC-BY-4.0 license: https://creativecommons.org/licenses/by/4.0/.

+
+ diff --git a/monocle/templates/report_single.html b/monocle/templates/report_single.html index 44bf193de..f5254c6de 100644 --- a/monocle/templates/report_single.html +++ b/monocle/templates/report_single.html @@ -1,11 +1,16 @@ - - Monocle Report: {{ pokemon_name }} in {{ area_name }} - - - - - - - + + + -
-

Monocle Report: #{{ pokemon_id }} {{ pokemon_name }}

-

Generated on {{ current_date.strftime('%Y-%m-%d %H:%M:%S') }}

- -

Disclaimer: data may be incomplete due to various issues that might have happened (bugs, unstable servers, bugs on the servers etc.). If there is data about a sighting of a Pokemon, that spawn almost certainly happened. On the other hand, there is no guarantee that the database contains all spawns, so there may be Pokemon missing from this report. Your mileage may vary.

- -

This report contains statistics about data gathered from scanning {{ area_name }} for single species - {{ pokemon_name }}.

- -
- {{ pokemon_name }} -
- -

During that session, {{ total_spawn_count }} sightings of {{ pokemon_name }} have been seen on an area of about {{ area_size }} square km. Data gathering started on {{ session_start.strftime('%Y-%m-%d %H:%M:%S') }} and ended on {{ session_end.strftime('%Y-%m-%d %H:%M:%S') }}, lasting {{ session_length_hours }} hours.

- - {% if google_maps_key %} -

Heatmap

- -

All noticed spawn locations of {{ pokemon_name }}. The redder the point is, {{ pokemon_name }} spawned more often there.

- -

(will slow down browser!)

- -
- {% endif %} - -

Spawning hours

- -

At what time of the day has {{ pokemon_name }} been seen most number of times?

- -
- -

Footnotes

- -

This report was generated using Monocle, a tool for gathering data about Pokemon Go.

- -

Check out Monocle on GitHub for more info.

- -

This report is available under Creative Commons CC-BY-4.0 license: https://creativecommons.org/licenses/by/4.0/.

-
- +
+

Monocle Report: #{{ pokemon_id }} {{ pokemon_name }}

+

Generated on {{ current_date.strftime('%Y-%m-%d %H:%M:%S') }}

+

Disclaimer: data may be incomplete due to various issues that might have happened (bugs, unstable + servers, bugs on the servers etc.). If there is data about a sighting of a Pokemon, that spawn almost certainly + happened. On the other hand, there is no guarantee that the database contains all spawns, so there may be + Pokemon missing from this report. Your mileage may vary.

+

This report contains statistics about data gathered from scanning {{ area_name }} for single species - {{ + pokemon_name }}.

+
+
+

During that session, {{ total_spawn_count }} sightings of {{ pokemon_name }} have been seen on an area of + about {{ area_size }} square km. Data gathering started on {{ session_start.strftime('%Y-%m-%d %H:%M:%S') + }} and ended on {{ session_end.strftime('%Y-%m-%d %H:%M:%S') }}, lasting {{ session_length_hours }} + hours.

+ {% if google_maps_key %} +

Heatmap

+

All noticed spawn locations of {{ pokemon_name }}. The redder the point is, {{ pokemon_name }} spawned more + often there.

+

(will slow down browser!)

+
+ {% endif %} +

Spawning hours

+

At what time of the day has {{ pokemon_name }} been seen most number of times?

+
+

Footnotes

+

This report was generated using Monocle, a tool for gathering data about Pokemon Go.

+

Check out Monocle on GitHub for more info.

+

This report is available under Creative Commons CC-BY-4.0 license: https://creativecommons.org/licenses/by/4.0/.

+
+ diff --git a/monocle/templates/workersmap.html b/monocle/templates/workersmap.html index c61288fa6..86836c27d 100644 --- a/monocle/templates/workersmap.html +++ b/monocle/templates/workersmap.html @@ -1,25 +1,28 @@ - - Monocle - {{ area_name }} - - - - + + Monocle - {{ area_name }} + + + + + + + + + + -

Monocle is initializing, please wait.

+

Monocle is initializing, please wait.

+
+ + {{ social_links }} + + + - - + From 8ff947c6c1fc58aca06eb928e5e47f70b9cd698c Mon Sep 17 00:00:00 2001 From: David Christenson Date: Wed, 17 May 2017 17:02:22 -0600 Subject: [PATCH 18/38] Change requirements to use pogeo develop branch 0.4 has not been released yet so use the latest commit from the develop branch instead. --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 60e1fb2ae..aa627111f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,5 +2,5 @@ protobuf>=3.0.0 sqlalchemy>=1.1.0 aiopogo>=1.9.1 aiohttp>=2.0.7,<2.1 -pogeo>=0.4.0 +https://github.com/Noctem/pogeo/archive/develop.zip cyrandom>=0.3.0 From d2916635435ddfb8ed044ba56b1f13cd01c59c5d Mon Sep 17 00:00:00 2001 From: David Christenson Date: Fri, 19 May 2017 15:21:00 -0600 Subject: [PATCH 19/38] Use AioSpawnCache Use asynchronous spawn cache in pogeo.monotools. --- web.py | 2 +- web_sanic.py | 27 ++++++++++++++++++--------- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/web.py b/web.py index 017b61e5d..7561cf63b 100755 --- a/web.py +++ b/web.py @@ -10,9 +10,9 @@ except ImportError: from json import dumps +from flask import Flask, jsonify, make_response, Markup, render_template, request from pogeo.monotools.sightingcache import SightingCache from pogeo.monotools.spawncache import SpawnCache -from flask import Flask, jsonify, make_response, Markup, render_template, request from monocle import bounds, db, names, sanitized as conf from monocle.web_utils import * diff --git a/web_sanic.py b/web_sanic.py index c390f2bb6..1572c88a0 100755 --- a/web_sanic.py +++ b/web_sanic.py @@ -3,11 +3,12 @@ from pkg_resources import resource_filename from time import time +from asyncpg import create_pool +from jinja2 import Environment, PackageLoader, Markup from sanic import Sanic from sanic.response import html, HTTPResponse, json -from jinja2 import Environment, PackageLoader, Markup -from asyncpg import create_pool 
from pogeo.monotools.aiosightingcache import AioSightingCache +from pogeo.monotools.aiospawncache import AioSpawnCache from monocle import bounds, names, sanitized as conf from monocle.web_utils import get_worker_markers, Workers, get_args @@ -16,7 +17,8 @@ env = Environment(loader=PackageLoader('monocle', 'templates')) app = Sanic(__name__) app.static('/static', resource_filename('monocle', 'static')) -_CACHE = AioSightingCache(conf, names) +_SIGHTINGS = AioSightingCache(conf, names) +_SPAWNS = AioSpawnCache(conf.SPAWN_ID_INT) def social_links(): @@ -93,7 +95,7 @@ async def workers_map(request, html_map=render_worker_map()): @app.get('/data') -async def pokemon_data(request, _cache=_CACHE): +async def pokemon_data(request, _cache=_SIGHTINGS): try: compress = 'gzip' in request.headers['Accept-Encoding'].lower() except KeyError: @@ -140,10 +142,16 @@ async def gym_data(request, names=names.POKEMON, _str=str): @app.get('/spawnpoints') -async def spawn_points(request, _dict=dict): - async with app.pool.acquire() as conn: - results = await conn.fetch('SELECT spawn_id, despawn_time, lat, lon, duration FROM spawnpoints') - return json([_dict(x) for x in results]) +async def spawn_points(request, _cache=_SPAWNS): + try: + compress = 'gzip' in request.headers['Accept-Encoding'].lower() + except KeyError: + compress = False + body = await _cache.get_json(compress) + return HTTPResponse( + body_bytes=body, + content_type='application/json', + headers={'Content-Encoding': 'gzip'} if compress else None) @app.get('/pokestops') @@ -161,7 +169,8 @@ async def scan_coords(request, _response=HTTPResponse(body_bytes=bounds.json, co @app.listener('before_server_start') async def register_db(app, loop): app.pool = await create_pool(dsn=conf.DB_ENGINE, loop=loop) - _CACHE.initialize(loop, app.pool) + _SIGHTINGS.initialize(loop, app.pool) + _SPAWNS.initialize(loop, app.pool) def main(): From e4baa9c7a3d3dbf6d4a62fc33262dc74adba1345 Mon Sep 17 00:00:00 2001 From: David Christenson Date: 
Tue, 23 May 2017 14:13:46 -0600 Subject: [PATCH 20/38] Use pogeo shapes instead of shapely for landmarks Remove dependence on shapely and geopy, improve performance of landmarks, automate landmark pickling/unpickling, read landmarks from config as a sequence of dicts instead of a Landmarks object, a little refactoring, miscellaneous fixes. --- .travis.yml | 2 +- README.md | 1 - config.example.py | 48 ++--- monocle/altitudes.py | 6 +- monocle/landmarks.py | 273 ++++++++++------------------ monocle/notification.py | 29 +-- monocle/overseer.py | 6 +- monocle/sanitized.py | 4 +- optional-requirements.txt | 5 +- requirements.txt | 3 +- scripts/pickle_landmarks.example.py | 35 ---- setup.py | 17 +- 12 files changed, 150 insertions(+), 279 deletions(-) delete mode 100755 scripts/pickle_landmarks.example.py diff --git a/.travis.yml b/.travis.yml index 731bac317..c76d57d3c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,4 +33,4 @@ install: script: - cp accounts.example.csv accounts.csv - python3 scripts/create_db.py - - python3 -c 'from monocle import avatar, bounds, db_proc, db, names, notification, overseer, sanitized, shared, spawns, utils, web_utils, worker' + - python3 -c 'from monocle import avatar, bounds, db_proc, db, landmarks, names, notification, overseer, sanitized, shared, spawns, utils, web_utils, worker' diff --git a/README.md b/README.md index 8fd6698a6..175433da4 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,6 @@ Since it uses [Leaflet](http://leafletjs.com/) for mapping, the appearance and d * *asyncpushbullet* is required for PushBullet notifications * *peony-twitter* is required for Twitter notifications * *gpsoauth* is required for logging in to Google accounts - * *shapely* is required for landmarks or boundary polygons * *selenium* (and [ChromeDriver](https://sites.google.com/a/chromium.org/chromedriver/)) are required for manually solving CAPTCHAs * *uvloop* provides better event loop performance * *pycairo* is required for generating IV/move 
images diff --git a/config.example.py b/config.example.py index c1661457a..2bc4dfb7a 100644 --- a/config.example.py +++ b/config.example.py @@ -1,4 +1,4 @@ -### All lines that are commented out (and some that aren't) are optional ### +### All lines that are commented out (and many that aren't) are optional ### DB_ENGINE = 'sqlite:///db.sqlite' #DB_ENGINE = 'mysql://user:pass@localhost/monocle' @@ -314,18 +314,8 @@ ##### Referencing landmarks in your tweets/notifications - -#### It is recommended to store the LANDMARKS object in a pickle to reduce startup -#### time if you are using queries. An example script for this is in: -#### scripts/pickle_landmarks.example.py -#from pickle import load -#with open('pickles/landmarks.pickle', 'rb') as f: -# LANDMARKS = load(f) - -### if you do pickle it, just load the pickle and omit everything below this point - -#from monocle.landmarks import Landmarks -#LANDMARKS = Landmarks(query_suffix=AREA_NAME) +## Appended to OpenStreetMap queries if a query and query_suffix aren't provided +QUERY_SUFFIX = 'Salt Lake City' # Landmarks to reference when Pokémon are nearby # If no points are specified then it will query OpenStreetMap for the coordinates @@ -339,20 +329,18 @@ # When selecting a landmark, non-areas will be chosen first if any are close enough # the default phrase is 'in' for areas and 'at' for non-areas, but can be overriden for either. 
-### replace these with well-known places in your area - -## since no points or query is provided, the names provided will be queried and suffixed with AREA_NAME -#LANDMARKS.add('Rice Eccles Stadium', shortname='Rice Eccles', hashtags={'Utes'}) -#LANDMARKS.add('the Salt Lake Temple', shortname='the temple', hashtags={'TempleSquare'}) - -## provide two corner points to create a square for this area -#LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags={'CityCreek'}) - -## provide a query that is different from the landmark name so that OpenStreetMap finds the correct one -#LANDMARKS.add('the State Capitol', shortname='the Capitol', query='Utah State Capitol Building') - -### area examples ### -## query using name, override the default area phrase so that it says 'at (name)' instead of 'in' -#LANDMARKS.add('the University of Utah', shortname='the U of U', hashtags={'Utes'}, phrase='at', is_area=True) -## provide corner points to create a polygon of the area since OpenStreetMap does not have a shape for it -#LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True) +''' # these triple quotes are block comments, remove them to fill in your own landmarks +LANDMARKS = ( + # since no points or query is provided, the names provided will be queried and suffixed with QUERY_SUFFIX + {'name': 'Rice Eccles Stadium', 'shortname': 'Rice Eccles', 'hashtags': ['Utes']}, + {'name': 'the Salt Lake Temple', 'shortname': 'the temple', 'hashtags': ['TempleSquare']}, + # provide two corner points to create a square for this area + {'name': 'City Creek Center', 'points': ((40.769210, -111.893901), (40.767231, -111.888275)), 'hashtags': ['CityCreek']}, + # provide a query that is different from the landmark name so that OpenStreetMap finds the correct one + {'name': 'the State Capitol', 'shortname': 'the Capitol', 
'query': 'Utah State Capitol Building'}, + # query using name, override the default area phrase so that it says 'at (name)' instead of 'in' + {'name': 'the University of Utah', 'shortname': 'the U of U', 'hashtags': ['Utes'], 'phrase': 'at', 'is_area': True}, + # provide corner points to create a polygon of the area since OpenStreetMap does not have a shape for it + {'name': 'Yalecrest', 'points': ((40.7502, -111.8365), (40.7503, -111.8511), (40.7515, -111.8538), (40.7412, -111.8539), (40.7411, -111.8365)), 'is_area': True} +) +''' diff --git a/monocle/altitudes.py b/monocle/altitudes.py index 6972e4e4a..36c99a83d 100644 --- a/monocle/altitudes.py +++ b/monocle/altitudes.py @@ -1,4 +1,4 @@ -from os import join +from os.path import join from pogeo.altitude import AltitudeCache @@ -26,8 +26,8 @@ def load_alts(): if not unpickled: try: - ALTITUDES.fetch_all() - except Exception(): + ALTITUDES.fetch_all(bounds) + except Exception: log.exception('Error while fetching altitudes.') if ALTITUDES: log.warning('{} altitudes fetched.', len(ALTITUDES)) diff --git a/monocle/landmarks.py b/monocle/landmarks.py index 3d3ae640c..ce43d996d 100644 --- a/monocle/landmarks.py +++ b/monocle/landmarks.py @@ -1,16 +1,11 @@ +from hashlib import sha256 from logging import getLogger -from shapely.geometry import Point, Polygon, shape, box, LineString -from shapely import speedups -from geopy import Nominatim -from pogeo import get_distance +from aiopogo import json_dumps +from pogeo import Location, Loop, Rectangle +from pogeo.geocoder import geocode -if speedups.available: - speedups.enable() - - -class FailedQuery(Exception): - """Raised when no location is found.""" +from .utils import dump_pickle, load_pickle class Landmark: @@ -26,23 +21,23 @@ def __init__(self, name, shortname=None, points=None, query=None, if not points and not query: query = name.lstrip('the ') + # append query suffix if it's not already present in query if ((query_suffix and query) and query_suffix.lower() not in 
query.lower()): query = '{} {}'.format(query, query_suffix) self.location = None if query: - self.query_location(query) + self.location = geocode(query, self.log) elif points: try: length = len(points) if length > 2: - self.location = Polygon(points) + self.location = Loop(points) elif length == 2: - self.location = box(points[0][0], points[0][1], - points[1][0], points[1][1]) + self.location = Rectangle(*points) elif length == 1: - self.location = Point(*points[0]) + self.location = Location(*points[0]) except TypeError: raise ValueError('points must be a list/tuple of lists/tuples' ' containing 2 coordinates each') @@ -50,13 +45,9 @@ def __init__(self, name, shortname=None, points=None, query=None, if not self.location: raise ValueError('No location provided for {}. Must provide' ' either points, or query.'.format(self.name)) - elif not isinstance(self.location, (Point, Polygon, LineString)): - raise NotImplementedError('{} is a {} which is not supported' - .format(self.name, self.location.type)) - self.south, self.west, self.north, self.east = self.location.bounds - # very imprecise conversion to square meters - self.size = self.location.area * 12100000000 + # square kilometers + self.size = self.location.area if phrase: self.phrase = phrase @@ -67,155 +58,87 @@ def __init__(self, name, shortname=None, points=None, query=None, self.hashtags = hashtags - def __contains__(self, coordinates): - """determine if a point is within this object range""" - lat, lon = coordinates - if (self.south <= lat <= self.north and - self.west <= lon <= self.east): - return self.location.contains(Point(lat, lon)) - return False - - def query_location(self, query): - def swap_coords(geojson): - out = [] - for x in geojson: - if isinstance(x, list): - out.append(swap_coords(x)) - else: - return geojson[1], geojson[0] - return out + def __repr__(self): + center = self.location if isinstance(self.location, Location) else self.location.center + return ''.format(self.name, center, 
type(self.location), self.size) - nom = Nominatim() - try: - geo = nom.geocode(query=query, geometry='geojson', timeout=3).raw - geojson = geo['geojson'] - except (AttributeError, KeyError): - raise FailedQuery('Query for {} did not return results.'.format(query)) - self.log.info('Nominatim returned {} for {}'.format(geo['display_name'], query)) - geojson['coordinates'] = swap_coords(geojson['coordinates']) - self.location = shape(geojson) - - def get_coordinates(self): - if isinstance(self.location, Polygon): - return tuple(self.location.exterior.coordinates) - else: - return self.location.coords[0] + def __contains__(self, loc): + """determine if a point is within this object range""" + return loc in self.location - def generate_string(self, coordinates): - if coordinates in self: + def generate_string(self, loc): + if loc in self.location: return '{} {}'.format(self.phrase, self.name) - distance = self.distance_from_point(coordinates) + distance = self.location.distance(loc) if distance < 50 or (self.is_area and distance < 100): return '{} {}'.format(self.phrase, self.name) else: return '{:.0f} meters from {}'.format(distance, self.name) - def distance_from_point(self, coordinates): - point = Point(*coordinates) - if isinstance(self.location, Point): - nearest = self.location - else: - nearest = self.nearest_point(point) - return get_distance(coordinates, nearest.coords[0]) - - def nearest_point(self, point): - '''Find nearest point in geometry, measured from given point.''' - if isinstance(self.location, Polygon): - segs = self.pairs(self.location.exterior.coords) - elif isinstance(self.location, LineString): - segs = self.pairs(self.location.coords) - else: - raise NotImplementedError('project_point_to_object not implemented' - "for geometry type '{}'.".format( - self.location.type)) - - nearest_point = None - min_dist = float("inf") - for seg_start, seg_end in segs: - line_start = Point(seg_start) - line_end = Point(seg_end) +class Landmarks: + __slots__ = 
('points_of_interest', 'areas') + + def __init__(self, landmarks, query_suffix): + self.areas = [] + self.points_of_interest = [] + + sha = sha256( + json_dumps(landmarks, + ensure_ascii=False, + sort_keys=True).encode('utf-8') + ).digest() + + if not self.unpickle(sha): + for kwargs in landmarks: + if 'query_suffix' not in kwargs and 'query' not in kwargs: + kwargs['query_suffix'] = query_suffix + + landmark = Landmark(**kwargs) + if landmark.is_area: + self.areas.append(landmark) + else: + self.points_of_interest.append(landmark) - intersection_point = self.project_point_to_line( - point, line_start, line_end) - cur_dist = point.distance(intersection_point) + self.pickle(sha) - if cur_dist < min_dist: - min_dist = cur_dist - nearest_point = intersection_point - return nearest_point + def __bool__(self): + return self.points_of_interest or self.areas - @staticmethod - def pairs(lst): - """Iterate over a list in overlapping pairs.""" - i = iter(lst) - prev = next(i) - for item in i: - yield prev, item - prev = item + def pickle(self, sha): + dump_pickle('landmarks', { + 'areas': self.areas, + 'points_of_interest': self.points_of_interest, + 'sha': sha}) - @staticmethod - def project_point_to_line(point, line_start, line_end): - '''Find nearest point on a straight line, - measured from given point.''' - line_magnitude = line_start.distance(line_end) - - u = (((point.x - line_start.x) * (line_end.x - line_start.x) + - (point.y - line_start.y) * (line_end.y - line_start.y)) - / (line_magnitude ** 2)) - - # closest point does not fall within the line segment, - # take the shorter distance to an endpoint - if u < 0.00001 or u > 1: - ix = point.distance(line_start) - iy = point.distance(line_end) - if ix > iy: - return line_end + def unpickle(self, sha): + try: + pickled = load_pickle('landmarks', raise_exception=True) + if sha == pickled['sha']: + self.areas = pickled['areas'] + self.points_of_interest = pickled['points_of_interest'] + return True else: - return 
line_start - else: - ix = line_start.x + u * (line_end.x - line_start.x) - iy = line_start.y + u * (line_end.y - line_start.y) - return Point([ix, iy]) - - -class Landmarks: - - def __init__(self, query_suffix=None): - self.points_of_interest = set() - self.areas = set() - self.query_suffix = query_suffix - - def add(self, *args, **kwargs): - if ('query_suffix' not in kwargs) and self.query_suffix and ( - 'query' not in kwargs): - kwargs['query_suffix'] = self.query_suffix - landmark = Landmark(*args, **kwargs) - if landmark.is_area: - self.areas.add(landmark) - else: - self.points_of_interest.add(landmark) - if landmark.size < 1: - print(landmark.name, type(landmark.location), '\n') - else: - print(landmark.name, landmark.size, type(landmark.location), '\n') + return False + except (FileNotFoundError, KeyError): + return False def find_landmark(self, coords, max_distance=750): - landmark = find_within(self.points_of_interest, coords) + landmark = self.find_within(self.points_of_interest, coords) if landmark: return landmark - landmark, distance = find_closest(self.points_of_interest, coords) + landmark, distance = self.find_closest(self.points_of_interest, coords) try: if distance < max_distance: return landmark except TypeError: pass - area = find_within(self.areas, coords) + area = self.find_within(self.areas, coords) if area: return area - area, area_distance = find_closest(self.areas, coords) + area, area_distance = self.find_closest(self.areas, coords) try: if area and area_distance < distance: @@ -225,37 +148,37 @@ def find_landmark(self, coords, max_distance=750): except TypeError: return area + @staticmethod + def find_within(landmarks, coordinates): + within = [landmark for landmark in landmarks if coordinates in landmark] + found = len(within) + if found == 1: + return within[0] + if found: + landmarks = iter(within) + smallest = next(landmarks) + smallest_size = landmark.size + for landmark in landmarks: + if landmark.size < smallest_size: + smallest = 
landmark + smallest_size = landmark.size + return smallest + return None -def find_within(landmarks, coordinates): - within = [landmark for landmark in landmarks if coordinates in landmark] - found = len(within) - if found == 1: - return within[0] - if found: - landmarks = iter(within) - smallest = next(landmarks) - smallest_size = landmark.size + @staticmethod + def find_closest(landmarks, coordinates): + landmarks = iter(landmarks) + try: + closest_landmark = next(landmarks) + except StopIteration: + return None, None + shortest_distance = closest_landmark.location.distance(coordinates) for landmark in landmarks: - if landmark.size < smallest_size: - smallest = landmark - smallest_size = landmark.size - return smallest - return None - - -def find_closest(landmarks, coordinates): - landmarks = iter(landmarks) - try: - closest_landmark = next(landmarks) - except StopIteration: - return None, None - shortest_distance = closest_landmark.distance_from_point(coordinates) - for landmark in landmarks: - distance = landmark.distance_from_point(coordinates) - if distance <= shortest_distance: - if (distance == shortest_distance - and landmark.size > closest_landmark.size): - continue - shortest_distance = distance - closest_landmark = landmark - return closest_landmark, shortest_distance + distance = landmark.location.distance(coordinates) + if distance <= shortest_distance: + if (distance == shortest_distance + and landmark.size > closest_landmark.size): + continue + shortest_distance = distance + closest_landmark = landmark + return closest_landmark, shortest_distance diff --git a/monocle/notification.py b/monocle/notification.py index 9bb2a77b4..44cee2a7a 100755 --- a/monocle/notification.py +++ b/monocle/notification.py @@ -8,12 +8,14 @@ from aiohttp import ClientError, ClientResponseError, ServerTimeoutError from aiopogo import json_dumps, json_loads +from pogeo import Location -from .utils import load_pickle, dump_pickle +from . 
import sanitized as conf from .db import session_scope, get_pokemon_ranking, estimate_remaining_time +from .landmarks import Landmarks from .names import MOVES, POKEMON from .shared import get_logger, SessionManager, LOOP, run_threaded -from . import sanitized as conf +from .utils import load_pickle, dump_pickle WEBHOOK = False @@ -226,10 +228,13 @@ def draw_name(self, pos, font=conf.NAME_FONT): class Notification: + landmarks = (Landmarks(conf.LANDMARKS, conf.QUERY_SUFFIX) + if conf.LANDMARKS and (TWITTER or PUSHBULLET) else None) + def __init__(self, pokemon, score, time_of_day): self.pokemon = pokemon self.name = POKEMON[pokemon['pokemon_id']] - self.coordinates = pokemon['lat'], pokemon['lon'] + self.coordinates = Location(pokemon['lat'], pokemon['lon']) self.score = score self.time_of_day = time_of_day self.log = get_logger('notifier') @@ -257,11 +262,6 @@ def __init__(self, pokemon, score, time_of_day): _tz = None now = datetime.fromtimestamp(pokemon['seen'], _tz) - if TWITTER and conf.HASHTAGS: - self.hashtags = conf.HASHTAGS.copy() - else: - self.hashtags = set() - # check if expiration time is known, or a range try: self.tth = pokemon['time_till_hidden'] @@ -292,15 +292,16 @@ def __init__(self, pokemon, score, time_of_day): self.place = None async def notify(self): - if conf.LANDMARKS and (TWITTER or PUSHBULLET): - self.landmark = conf.LANDMARKS.find_landmark(self.coordinates) - - try: + if self.landmarks: + self.landmark = self.landmarks.find_landmark(self.coordinates) self.place = self.landmark.generate_string(self.coordinates) - if TWITTER and self.landmark.hashtags: + if TWITTER: + self.hashtags = conf.HASHTAGS.copy() if conf.HASHTAGS else set() self.hashtags.update(self.landmark.hashtags) - except AttributeError: + else: self.place = self.generic_place_string() + if TWITTER: + self.hashtags = conf.HASHTAGS.copy() if conf.HASHTAGS else None if PUSHBULLET or TELEGRAM: try: diff --git a/monocle/overseer.py b/monocle/overseer.py index 52b7796d5..e552cd688 
100755 --- a/monocle/overseer.py +++ b/monocle/overseer.py @@ -56,15 +56,15 @@ } _unit = conf.SPEED_UNIT.lower() -if _unit == "miles": +if _unit == 'miles': # miles/hour to meters/second, default to 19.5mph SPEED_LIMIT = conf.SPEED_LIMIT * 0.44704 if conf.SPEED_LIMIT else 8.71728 GOOD_ENOUGH = conf.GOOD_ENOUGH * 0.44704 if conf.GOOD_ENOUGH else 0.44704 -if _unit == "kilometers": +elif _unit == 'kilometers': # kilometers/hour to meters/second, default to 31.38km/h SPEED_LIMIT = conf.SPEED_LIMIT * 1000 / 3600 if conf.SPEED_LIMIT else 8.71728 GOOD_ENOUGH = conf.GOOD_ENOUGH * 1000 / 3600 if conf.GOOD_ENOUGH else 0.44704 -elif _unit is "meters": +elif _unit == 'meters': # meters/hour to meters/second SPEED_LIMIT = conf.SPEED_LIMIT / 3600 if conf.SPEED_LIMIT else 8.71728 GOOD_ENOUGH = conf.GOOD_ENOUGH / 3600 if conf.GOOD_ENOUGH else 0.44704 diff --git a/monocle/sanitized.py b/monocle/sanitized.py index dff2ddd87..9f4ecfe99 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -61,7 +61,7 @@ 'INITIAL_SCORE': Number, 'ITEM_LIMITS': dict, 'IV_FONT': str, - 'LANDMARKS': object, + 'LANDMARKS': sequence, 'LANGUAGE': str, 'LAST_MIGRATION': Number, 'LOAD_CUSTOM_CSS_FILE': bool, @@ -92,6 +92,7 @@ 'PLAYER_LOCALE': dict, 'PROVIDER': str, 'PROXIES': set_sequence, + 'QUERY_SUFFIX': str, 'RARE_IDS': set_sequence_range, 'RARITY_OVERRIDE': dict, 'REFRESH_RATE': Number, @@ -200,6 +201,7 @@ 'PROXIES': None, 'RARE_IDS': frozenset(), 'RARITY_OVERRIDE': {}, + 'QUERY_SUFFIX': None, 'REFRESH_RATE': 0.6, 'REPORT_MAPS': True, 'REPORT_SINCE': None, diff --git a/optional-requirements.txt b/optional-requirements.txt index 982bb6f19..92f8dac2f 100644 --- a/optional-requirements.txt +++ b/optional-requirements.txt @@ -1,16 +1,13 @@ asyncpushbullet>=0.12 peony-twitter>=0.9.3 gpsoauth>=0.4.0 -shapely>=1.3.0 selenium>=3.0 uvloop>=0.7.0 cchardet>=1.1.0 aiodns>=1.1.0 aiosocks>=0.2.2 ujson>=1.35 -sanic>=0.3 -flask>=0.11.1 -geopy>=1.11.0 +sanic>=0.5 asyncpg>=0.8 psycopg2>=2.6 mysqlclient>=1.3 diff 
--git a/requirements.txt b/requirements.txt index e1a8b274e..b93f094aa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,5 +2,6 @@ protobuf>=3.0.0 sqlalchemy>=1.1.0 aiopogo>=2.0.0 aiohttp>=2.0.7,<2.1 -https://github.com/Noctem/pogeo/archive/develop.zip +pogeo>=0.4 cyrandom>=0.3.0 +flask>=0.11.1 diff --git a/scripts/pickle_landmarks.example.py b/scripts/pickle_landmarks.example.py deleted file mode 100755 index 96cc8cf59..000000000 --- a/scripts/pickle_landmarks.example.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python3 - -import sys -import pickle - -from pathlib import Path - -monocle_dir = Path(__file__).resolve().parents[1] -sys.path.append(str(monocle_dir)) - -from monocle.landmarks import Landmarks - -pickle_dir = monocle_dir / 'pickles' -pickle_dir.mkdir(exist_ok=True) - -LANDMARKS = Landmarks(query_suffix='Salt Lake City') - -# replace the following with your own landmarks -LANDMARKS.add('Rice Eccles Stadium', hashtags={'Utes'}) -LANDMARKS.add('the Salt Lake Temple', hashtags={'TempleSquare'}) -LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags={'CityCreek'}) -LANDMARKS.add('the State Capitol', query='Utah State Capitol Building') -LANDMARKS.add('the University of Utah', hashtags={'Utes'}, phrase='at', is_area=True) -LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True) - -pickle_path = pickle_dir / 'landmarks.pickle' -with pickle_path.open('wb') as f: - pickle.dump(LANDMARKS, f, pickle.HIGHEST_PROTOCOL) - - -print('\033[94mDone. 
Now add the following to your config:\n\033[92m', - 'import pickle', - "with open('pickles/landmarks.pickle', 'rb') as f:", - ' LANDMARKS = pickle.load(f)', - sep='\n') diff --git a/setup.py b/setup.py index 42165f269..906096db8 100755 --- a/setup.py +++ b/setup.py @@ -13,29 +13,24 @@ zip_safe=False, scripts=('scan.py', 'web.py', 'web_sanic.py', 'gyms.py', 'solve_captchas.py'), install_requires=[ - 'geopy>=1.11.0', 'protobuf>=3.0.0', - 'flask>=0.11.1', - 'gpsoauth>=0.4.0', - 'werkzeug>=0.11.15', 'sqlalchemy>=1.1.0', - 'aiopogo>=1.8.0', - 'polyline>=1.3.1', + 'aiopogo>=2.0.0', 'aiohttp>=2.0.7,<2.1', - 'pogeo==0.3.*', - 'cyrandom>=0.1.2' + 'pogeo>=0.4', + 'cyrandom>=0.3.0', + 'flask>=0.11.1' ], extras_require={ 'twitter': ['peony-twitter>=0.9.3'], 'pushbullet': ['asyncpushbullet>=0.12'], - 'landmarks': ['shapely>=1.3.0'], - 'boundaries': ['shapely>=1.3.0'], 'manual_captcha': ['selenium>=3.0'], 'performance': ['uvloop>=0.7.0', 'cchardet>=1.1.0', 'aiodns>=1.1.0', 'ujson>=1.35'], 'mysql': ['mysqlclient>=1.3'], 'postgres': ['psycopg2>=2.6'], 'images': ['pycairo>=1.10.0'], 'socks': ['aiosocks>=0.2.2'], - 'sanic': ['sanic>=0.4', 'asyncpg>=0.8', 'ujson>=1.35'] + 'sanic': ['sanic>=0.5', 'asyncpg>=0.8', 'ujson>=1.35'], + 'google_accounts': ['gpsoauth>=0.4.0'] } ) From b810e2b7cdd612b1dba522e36241a472d59376ca Mon Sep 17 00:00:00 2001 From: David Christenson Date: Sat, 27 May 2017 22:31:56 -0600 Subject: [PATCH 21/38] Add a script for converting landmarks config Add an example script for converting old-style landmarks configurations (constructing an actual Landmarks instance in config.py) with the new-style of using standard Python data-types (a tuple/list of dicts). 
--- scripts/convert_landmarks_example.py | 39 ++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100755 scripts/convert_landmarks_example.py diff --git a/scripts/convert_landmarks_example.py b/scripts/convert_landmarks_example.py new file mode 100755 index 000000000..4b3addf73 --- /dev/null +++ b/scripts/convert_landmarks_example.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 + + +class Landmarks: + args = 'name', 'shortname', 'points', 'query', 'hashtags', 'phrase', 'is_area', 'query_suffix' + + def __init__(self, query_suffix=None): + self.query_suffix = query_suffix + + self.landmarks = [] + + def add(self, *args, **kwargs): + dictionary = {self.args[num]: arg for num, arg in enumerate(args)} + dictionary.update(kwargs) + + self.landmarks.append(dictionary) + + def print_config(self): + print('Replace your old Landmarks config with the following:\n') + + if self.query_suffix: + print("QUERY_SUFFIX = '{}'".format(self.query_suffix)) + + print('LANDMARKS =', tuple(self.landmarks)) + + +### replace example below with your own old-style landmarks config ### +LANDMARKS = Landmarks(query_suffix='Salt Lake City') + +LANDMARKS.add('Rice Eccles Stadium', hashtags={'Utes'}) +LANDMARKS.add('the Salt Lake Temple', hashtags={'TempleSquare'}) +LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags={'CityCreek'}) +LANDMARKS.add('the State Capitol', query='Utah State Capitol Building') +LANDMARKS.add('the University of Utah', hashtags={'Utes'}, phrase='at', is_area=True) +LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True) +### replace example above with your own old-style landmarks config ### + + +LANDMARKS.print_config() From 12d1a8526357e49a6b66a3d28bdfeabd4edffd70 Mon Sep 17 00:00:00 2001 From: David Christenson Date: Tue, 30 May 2017 14:52:14 -0600 Subject: [PATCH 22/38] Bug 
fixes Use correct cache in add_mystery, use visit_point instead of visit in Overseer, fix setting spawn/despawn times in update_spawns, fix some variable names in Worker. --- monocle/db.py | 4 ++-- monocle/landmarks.py | 2 +- monocle/notification.py | 6 +++--- monocle/overseer.py | 4 ++-- monocle/spawns.py | 6 +++--- monocle/worker.py | 31 +++++++++++++++------------- scripts/convert_landmarks_example.py | 8 +++---- scripts/test_notifications.py | 2 +- 8 files changed, 33 insertions(+), 30 deletions(-) diff --git a/monocle/db.py b/monocle/db.py index 98fc9432c..abeedf2b6 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -415,10 +415,10 @@ def add_mystery_spawnpoint(session, pokemon): )) if spawnid_to_loc(spawn_id) in bounds: - spawns.unknowns.add(spawn_id) + spawns.unknown.add(spawn_id) -def add_mystery(session, pokemon, _cache=SIGHTING_CACHE, _encounter=conf.ENCOUNTER): +def add_mystery(session, pokemon, _cache=MYSTERY_CACHE, _encounter=conf.ENCOUNTER): if pokemon in _cache: return add_mystery_spawnpoint(session, pokemon) diff --git a/monocle/landmarks.py b/monocle/landmarks.py index ce43d996d..915724fac 100644 --- a/monocle/landmarks.py +++ b/monocle/landmarks.py @@ -103,7 +103,7 @@ def __init__(self, landmarks, query_suffix): self.pickle(sha) def __bool__(self): - return self.points_of_interest or self.areas + return bool(self.points_of_interest or self.areas) def pickle(self, sha): dump_pickle('landmarks', { diff --git a/monocle/notification.py b/monocle/notification.py index 44cee2a7a..f96ed637c 100755 --- a/monocle/notification.py +++ b/monocle/notification.py @@ -289,7 +289,6 @@ def __init__(self, pokemon, score, time_of_day): now + max_delta).strftime('%I:%M %p').lstrip('0') self.map_link = 'https://maps.google.com/maps?q={0[0]:.5f},{0[1]:.5f}'.format(self.coordinates) - self.place = None async def notify(self): if self.landmarks: @@ -297,11 +296,12 @@ async def notify(self): self.place = self.landmark.generate_string(self.coordinates) if TWITTER: 
self.hashtags = conf.HASHTAGS.copy() if conf.HASHTAGS else set() - self.hashtags.update(self.landmark.hashtags) + if self.landmark.hashtags: + self.hashtags.update(self.landmark.hashtags) else: self.place = self.generic_place_string() if TWITTER: - self.hashtags = conf.HASHTAGS.copy() if conf.HASHTAGS else None + self.hashtags = conf.HASHTAGS.copy() if conf.HASHTAGS else set() if PUSHBULLET or TELEGRAM: try: diff --git a/monocle/overseer.py b/monocle/overseer.py index e552cd688..c1b5643cd 100755 --- a/monocle/overseer.py +++ b/monocle/overseer.py @@ -454,7 +454,7 @@ async def try_again(self, point): async with self.coroutine_semaphore: worker = await self.best_worker(point, False) async with worker.busy: - if await worker.visit(point): + if await worker.visit_point(point): self.visits += 1 async def bootstrap(self): @@ -533,7 +533,7 @@ async def try_spawn(self, spawn_id, spawn_time=None, skip_time=conf.GIVE_UP_KNOW if spawn_time: worker.after_spawn = time() - spawn_time - if await worker.visit(point, spawn_id): + if await worker.visit_point(location, spawn_id): self.visits += 1 except CancelledError: raise diff --git a/monocle/spawns.py b/monocle/spawns.py index 954f0bc0c..0c5ea7c86 100644 --- a/monocle/spawns.py +++ b/monocle/spawns.py @@ -45,9 +45,9 @@ def add_known(self, spawn_id, despawn_time): self.unknown.discard(spawn_id) def update(self, _migration=conf.LAST_MIGRATION, _contains=contains_spawn): + known = {} with db.session_scope() as session: query = session.query(db.Spawnpoint.spawn_id, db.Spawnpoint.despawn_time, db.Spawnpoint.duration, db.Spawnpoint.updated) - known = {} for spawn_id, despawn_time, duration, updated in query: # skip if point is not within boundaries (if applicable) if not _contains(spawn_id): @@ -57,9 +57,9 @@ def update(self, _migration=conf.LAST_MIGRATION, _contains=contains_spawn): self.unknown.add(spawn_id) continue - self.despawn_times[spawn_id] = despawn_time if duration == 60 else (despawn_time + 1800) % 3600 + 
self.despawn_times[spawn_id] = despawn_time - known[spawn_id] = spawn_time + known[spawn_id] = despawn_time if duration == 60 else (despawn_time + 1800) % 3600 if known: self.known = OrderedDict(sorted(known.items(), key=lambda k: k[1])) diff --git a/monocle/worker.py b/monocle/worker.py index 8d63df29d..10f0ab1a4 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -9,6 +9,7 @@ from aiopogo import PGoApi, HashServer, json_loads, exceptions as ex from aiopogo.auth_ptc import AuthPtc from cyrandom import choice, randint, uniform +from pogeo import Location from pogeo.utils import location_to_cellid, location_to_token from .altitudes import load_alts, set_altitude @@ -100,7 +101,7 @@ def initialize_api(self): self.empty_visits = 0 self.api = PGoApi(device_info=device_info) - self.api.location = self.location + self.api.position = self.location if self.proxies: self.api.proxy = next(self.proxies) try: @@ -587,7 +588,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False async def bootstrap_visit(self, point): for _ in range(3): - if await self.visit(point, bootstrap=True): + if await self.visit_point(point, bootstrap=True): return True self.error_code = '∞' self.location.jitter(0.00005, 0.00005, 0.5) @@ -599,16 +600,16 @@ async def visit_point(self, point, spawn_id=None, bootstrap=False): try: set_altitude(point) self.location = point - self.api.location = self.location + self.api.position = self.location if not self.authenticated: await self.login() - return await self.visit(point, spawn_id, bootstrap) + return await self.visit(spawn_id, bootstrap) except ex.NotLoggedInException: self.error_code = 'NOT AUTHENTICATED' await sleep(1, loop=LOOP) if not await self.login(reauth=True): await self.swap_account(reason='reauth failed') - return await self.visit(self.location, spawn_id, bootstrap) + return await self.visit(spawn_id, bootstrap) except ex.AuthException as e: self.log.warning('Auth error on {}: {}', self.username, e) 
self.error_code = 'NOT AUTHENTICATED' @@ -810,12 +811,12 @@ async def visit(self, spawn_id, bootstrap, self.visits += 1 if conf.MAP_WORKERS: - self.worker_dict[self.worker_no] = (point, self.location.time, - self.speed, self.total_seen, self.visits, pokemon_seen) + self.worker_dict[self.worker_no] = ( + self.location, self.location.time,self.speed, self.total_seen, + self.visits, pokemon_seen) self.log.info( 'Point processed, {} Pokemon and {} forts seen!', - pokemon_seen, - forts_seen) + pokemon_seen, forts_seen) self.update_accounts_dict() self.handle = LOOP.call_later(60, self.unset_code) @@ -835,7 +836,7 @@ def smart_throttle(self, requests=1): async def spin_pokestop(self, pokestop): self.error_code = '$' - distance = self.location.distance_meters(Location(pokestop['lat'], pokestop['lon'])) + distance = self.location.distance(Location(pokestop['lat'], pokestop['lon'])) # permitted interaction distance - 2 (for some jitter/calculation leeway) # estimation of spinning speed limit if distance > 38.0 or self.speed > 8.611: @@ -870,8 +871,8 @@ async def spin_pokestop(self, pokestop): if result == 1: self.log.info('Spun {}.', name) elif result == 2: - self.log.info('The server said {} was out of spinning range. {:.1f}m {:.1f}{}', - name, distance, self.speed, UNIT_STRING) + self.log.info('The server said {} was out of spinning range. {:.1f}m {:.1f}m/s', + name, distance, self.speed) elif result == 3: self.log.warning('{} was in the cooldown period.', name) elif result == 4: @@ -887,7 +888,7 @@ async def spin_pokestop(self, pokestop): self.error_code = '!' 
async def encounter(self, pokemon, spawn_id): - distance_to_pokemon = self.location.distance_meters(Location(pokemon['lat'], pokemon['lon'])) + distance_to_pokemon = self.location.distance(Location(pokemon['lat'], pokemon['lon'])) self.error_code = '~' @@ -1146,6 +1147,8 @@ def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): 'type': 'pokemon', 'encounter_id': raw.encounter_id, 'pokemon_id': raw.pokemon_data.pokemon_id, + 'lat': raw.latitude, + 'lon': raw.longitude, 'spawn_id': int(raw.spawn_point_id, 16) if spawn_int else raw.spawn_point_id, 'seen': tss } @@ -1166,7 +1169,7 @@ def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): @staticmethod def normalize_lured(raw, now, sid=location_to_cellid if conf.SPAWN_ID_INT else location_to_token): lure = raw.lure_info - loc = Location(raw['latitude'], raw['longitude']) + loc = Location(raw.latitude, raw.longitude) return { 'type': 'pokemon', 'encounter_id': lure.encounter_id, diff --git a/scripts/convert_landmarks_example.py b/scripts/convert_landmarks_example.py index 4b3addf73..39d55b49b 100755 --- a/scripts/convert_landmarks_example.py +++ b/scripts/convert_landmarks_example.py @@ -27,11 +27,11 @@ def print_config(self): ### replace example below with your own old-style landmarks config ### LANDMARKS = Landmarks(query_suffix='Salt Lake City') -LANDMARKS.add('Rice Eccles Stadium', hashtags={'Utes'}) -LANDMARKS.add('the Salt Lake Temple', hashtags={'TempleSquare'}) -LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags={'CityCreek'}) +LANDMARKS.add('Rice Eccles Stadium', hashtags=['Utes']) +LANDMARKS.add('the Salt Lake Temple', hashtags=['TempleSquare']) +LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags=['CityCreek']) LANDMARKS.add('the State Capitol', query='Utah State Capitol Building') -LANDMARKS.add('the University of Utah', hashtags={'Utes'}, phrase='at', is_area=True) +LANDMARKS.add('the University of 
Utah', hashtags=['Utes'], phrase='at', is_area=True) LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True) ### replace example above with your own old-style landmarks config ### diff --git a/scripts/test_notifications.py b/scripts/test_notifications.py index ef7476eba..965dbb6a9 100755 --- a/scripts/test_notifications.py +++ b/scripts/test_notifications.py @@ -53,7 +53,7 @@ if args.id == 0: names.POKEMON[0] = 'Test' else: - pokemon_id = randint(1, 252) + pokemon_id = randint(1, 251) if not args.unmodified: conf.ALWAYS_NOTIFY_IDS = {pokemon_id} From b2d9ecd34454eec52d34b1bdf943b5e30b2ab25e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Sun, 11 Jun 2017 18:31:37 +0200 Subject: [PATCH 23/38] c --- monocle/worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monocle/worker.py b/monocle/worker.py index 35c0e1cc0..a0413441e 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -158,7 +158,7 @@ async def login(self, reauth=False): raise err self.error_code = '°' - version = 6301 + version = 6304 async with self.sim_semaphore: self.error_code = 'APP SIMULATION' if conf.APP_SIMULATION: From ff23ca38e3b51f5b16d32f06617e4986b8456eea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Mon, 12 Jun 2017 08:16:08 +0200 Subject: [PATCH 24/38] Merge master --- .travis.yml | 10 +- README.md | 3 +- config.example.py | 127 ++-- monocle/__init__.py | 11 +- monocle/altitudes.py | 180 +++++- monocle/bounds.py | 97 +++- monocle/db.py | 99 ++-- monocle/landmarks.py | 273 +++++---- monocle/names.py | 24 +- monocle/notification.py | 33 +- monocle/overseer.py | 124 ++-- monocle/sanitized.py | 100 ++-- monocle/spawns.py | 159 +++-- monocle/static/js/main.js | 859 ++++++++++++++-------------- monocle/utils.py | 125 +++- monocle/web_utils.py | 109 +++- monocle/worker.py | 248 +++++--- 
optional-requirements.txt | 4 +- requirements.txt | 10 +- scan.py | 6 +- scripts/pickle_landmarks.example.py | 35 ++ scripts/test_notifications.py | 2 +- setup.py | 23 +- solve_captchas.py | 26 +- web.py | 66 +-- web_sanic.py | 92 +-- 26 files changed, 1714 insertions(+), 1131 deletions(-) create mode 100644 scripts/pickle_landmarks.example.py diff --git a/.travis.yml b/.travis.yml index c76d57d3c..dbee6390d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,25 +12,21 @@ matrix: dist: trusty python: 3.6 language: python - - os: osx - osx_image: xcode6.4 - language: generic - os: osx osx_image: xcode7.3 language: generic - os: osx - osx_image: xcode8.3 + osx_image: xcode8.2 language: generic before_install: - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update && brew install python3 || brew upgrade python3; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update && brew install python3; fi install: - pip3 install -r requirements.txt - - cp config.example.py monocle/config.py - python3 setup.py install script: - cp accounts.example.csv accounts.csv - python3 scripts/create_db.py - - python3 -c 'from monocle import avatar, bounds, db_proc, db, landmarks, names, notification, overseer, sanitized, shared, spawns, utils, web_utils, worker' + - python3 -c 'from monocle import avatar, bounds, db_proc, db, names, notification, overseer, sanitized, shared, spawns, utils, web_utils, worker' diff --git a/README.md b/README.md index cc102f6b4..8fd6698a6 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Monocle -[![Build Status](https://travis-ci.org/Noctem/Monocle.svg)](https://travis-ci.org/Noctem/Monocle) +[![Build Status](https://travis-ci.org/Noctem/Monocle.svg?branch=develop)](https://travis-ci.org/Noctem/Monocle) Monocle is the distinguished Pokémon Go scanner capable of scanning large areas for spawns. 
Features spawnpoint scanning, Twitter and PushBullet notifications, accurate expiration times and estimates based on historical data, pokestop and gym collection, a CAPTCHA solving script, and more. @@ -53,6 +53,7 @@ Since it uses [Leaflet](http://leafletjs.com/) for mapping, the appearance and d * *asyncpushbullet* is required for PushBullet notifications * *peony-twitter* is required for Twitter notifications * *gpsoauth* is required for logging in to Google accounts + * *shapely* is required for landmarks or boundary polygons * *selenium* (and [ChromeDriver](https://sites.google.com/a/chromium.org/chromedriver/)) are required for manually solving CAPTCHAs * *uvloop* provides better event loop performance * *pycairo* is required for generating IV/move images diff --git a/config.example.py b/config.example.py index 2bc4dfb7a..99eae37d3 100644 --- a/config.example.py +++ b/config.example.py @@ -1,4 +1,4 @@ -### All lines that are commented out (and many that aren't) are optional ### +### All lines that are commented out (and some that aren't) are optional ### DB_ENGINE = 'sqlite:///db.sqlite' #DB_ENGINE = 'mysql://user:pass@localhost/monocle' @@ -7,13 +7,13 @@ AREA_NAME = 'SLC' # the city or region you are scanning LANGUAGE = 'EN' # ISO 639-1 codes EN, DE, ES, FR, IT, JA, KO, PT, or ZH for Pokémon/move names MAX_CAPTCHAS = 100 # stop launching new visits if this many CAPTCHAs are pending -SCAN_DELAY = 10.0 # wait at least this many seconds before scanning with the same account +SCAN_DELAY = 10 # wait at least this many seconds before scanning with the same account SPEED_UNIT = 'miles' # valid options are 'miles', 'kilometers', 'meters' SPEED_LIMIT = 19.5 # limit worker speed to this many SPEED_UNITs per hour # The number of simultaneous workers will be these two numbers multiplied. # On the initial run, workers will arrange themselves in a grid across the -# the boundaries you define below. +# rectangle you defined with MAP_START and MAP_END. 
# The rows/columns will also be used for the dot grid in the console output. # Provide more accounts than the product of your grid to allow swapping. GRID = (4, 4) # rows, columns @@ -22,35 +22,22 @@ # any spawn points have been discovered MAP_START = (40.7913, -111.9398) MAP_END = (40.7143, -111.8046) + # do not visit spawn points outside of your MAP_START and MAP_END rectangle +# the boundaries will be the rectangle created by MAP_START and MAP_END, unless STAY_WITHIN_MAP = True -## alternatively define a polygon to use as boundaries -## must be a tuple of tuples (containing coordinates for vertices) -## if BOUNDARIES is set, MAP_START, MAP_END, and STAY_WITHIN_MAP will be ignored -#BOUNDARIES = ((40.799609, -111.948556), (40.792749, -111.887341), (40.779264, -111.838078), (40.761410, -111.817908), (40.728636, -111.805293), (40.688833, -111.785564), (40.689768, -111.919389), (40.750461, -111.949938)) - -## alternatively define multiple polygons to use as boundaries -## must be a tuple of tuples of tuples (containing coordinates for vertices) -#MULTI_BOUNDARIES = (((40.252083, -111.654868), (40.24589, -111.65413), (40.2454018, -111.64340), (40.252509, -111.64268)), ((40.2388, -111.643066), (40.23894, -111.63165), (40.23426, -111.63311), (40.2354, -111.64014))) - -## if using BOUNDARIES or MULTI_BOUNDARIES you may define polygonal holes -## workers will stay out of these holes as if they were out of bounds -## must be a tuple of tuples of tuples -# for only one hole do a tuple of tuples and a trailing comma, like so: -#HOLES = ((40.795, -111.94), (40.79, -111.88), (40.77, -111.83)), - # ensure that you visit within this many meters of every part of your map during bootstrap # lower values are more thorough but will take longer BOOTSTRAP_RADIUS = 120 -GIVE_UP_KNOWN = 75.0 # try to find a worker for a known spawn for this many seconds before giving up -GIVE_UP_UNKNOWN = 60.0 # try to find a worker for an unknown point for this many seconds before giving up 
-SKIP_SPAWN = 90.0 # don't even try to find a worker for a spawn if the spawn time was more than this many seconds ago +GIVE_UP_KNOWN = 75 # try to find a worker for a known spawn for this many seconds before giving up +GIVE_UP_UNKNOWN = 60 # try to find a worker for an unknown point for this many seconds before giving up +SKIP_SPAWN = 90 # don't even try to find a worker for a spawn if the spawn time was more than this many seconds ago # How often should the mystery queue be reloaded (default 90s) # this will reduce the grouping of workers around the last few mysteries -#RESCAN_UNKNOWN = 90.0 +#RESCAN_UNKNOWN = 90 # filename of accounts CSV ACCOUNTS_CSV = 'accounts.csv' @@ -71,11 +58,18 @@ # Immediately select workers whose speed are below (SPEED_UNIT)p/h instead of # continuing to try to find the worker with the lowest speed. # May increase clustering if you have a high density of workers. -GOOD_ENOUGH = 1.0 +GOOD_ENOUGH = 0.1 # Seconds to sleep after failing to find an eligible worker before trying again. 
SEARCH_SLEEP = 2.5 +## alternatively define a Polygon to use as boundaries (requires shapely) +## if BOUNDARIES is set, STAY_WITHIN_MAP will be ignored +## more information available in the shapely manual: +## http://toblerity.org/shapely/manual.html#polygons +#from shapely.geometry import Polygon +#BOUNDARIES = Polygon(((40.799609, -111.948556), (40.792749, -111.887341), (40.779264, -111.838078), (40.761410, -111.817908), (40.728636, -111.805293), (40.688833, -111.785564), (40.689768, -111.919389), (40.750461, -111.949938))) + # key for Bossland's hashing server, otherwise the old hashing lib will be used #HASH_KEY = '9d87af14461b93cb3605' # this key is fake @@ -91,7 +85,7 @@ # Defaults to whatever will allow every worker to be swapped within 6 hours #SWAP_OLDEST = 300 # 5 minutes # Only swap if it's been active for more than x minutes -#MINIMUM_RUNTIME = 10.0 +#MINIMUM_RUNTIME = 10 ### these next 6 options use more requests but look more like the real client APP_SIMULATION = True # mimic the actual app's login requests @@ -105,11 +99,11 @@ # 'notifying' will encounter Pokémon that are eligible for notifications # None will never encounter Pokémon ENCOUNTER = None -#ENCOUNTER_IDS = {3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 114, 130, 131, 134} +#ENCOUNTER_IDS = (3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 114, 130, 131, 134) # PokéStops SPIN_POKESTOPS = True # spin all PokéStops that are within range -SPIN_COOLDOWN = 300.0 # spin only one PokéStop every n seconds +SPIN_COOLDOWN = 300 # spin only one PokéStop every n seconds (default 300) # minimum number of each item to keep if the bag is cleaned # bag cleaning is disabled if this is not present or is commented out @@ -135,7 +129,7 @@ # Update the console output every x seconds REFRESH_RATE = 0.75 # 750ms # Update the seen/speed/visit/speed stats every x seconds -STAT_REFRESH = 5.0 +STAT_REFRESH = 5 # sent with GET_PLAYER requests, should match your region PLAYER_LOCALE = {'country': 'US', 'language': 'en', 
'timezone': 'America/Denver'} @@ -146,17 +140,20 @@ # number of seconds before timing out on a login request LOGIN_TIMEOUT = 2.5 +# add spawn points reported in cell_ids to the unknown spawns list +#MORE_POINTS = False + # Set to True to kill the scanner when a newer version is forced #FORCED_KILL = False # exclude these Pokémon from the map by default (only visible in trash layer) -TRASH_IDS = { +TRASH_IDS = ( 16, 19, 21, 29, 32, 41, 46, 48, 50, 52, 56, 74, 77, 96, 111, 133, 161, 163, 167, 177, 183, 191, 194 -} +) # include these Pokémon on the "rare" report -RARE_IDS = {3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 114, 130, 131, 134} +RARE_IDS = (3, 6, 9, 45, 62, 71, 80, 85, 87, 89, 91, 94, 114, 130, 131, 134) from datetime import datetime REPORT_SINCE = datetime(2017, 2, 17) # base reports on data from after this date @@ -164,14 +161,13 @@ # used for altitude queries and maps in reports #GOOGLE_MAPS_KEY = 'OYOgW1wryrp2RKJ81u7BLvHfYUA6aArIyuQCXu4' # this key is fake REPORT_MAPS = True # Show maps on reports +#ALT_RANGE = (1250, 1450) # Fall back to altitudes in this range if Google query fails -## S2 cell level to fetch altitudes for -## Higher levels will lead to a larger cache and more Google Elevation API requests -## Average diameter of some levels: -## 9: 17.85km, 10: 8.93km, 11: 4.46km, 12: 2.23km, 13: 1.12km, 14: 558m, 15: 279m -#ALT_LEVEL = 13 - -#ALT_RANGE = (390.0, 490.0) # Fall back to altitudes in this range if Google query fails +## Round altitude coordinates to this many decimal places +## More precision will lead to larger caches and more Google API calls +## Maximum distance from coords to rounded coords for precisions (at Lat40): +## 1: 7KM, 2: 700M, 3: 70M, 4: 7M +#ALT_PRECISION = 2 ## Automatically resolve captchas using 2Captcha key. 
#CAPTCHA_KEY = '1abc234de56fab7c89012d34e56fa7b8' @@ -183,7 +179,7 @@ # allow displaying the live location of workers on the map MAP_WORKERS = True # filter these Pokemon from the map to reduce traffic and browser load -#MAP_FILTER_IDS = (16, 19, 161, 165, 167) +#MAP_FILTER_IDS = [161, 165, 16, 19, 167] # unix timestamp of last spawn point migration, spawn times learned before this will be ignored LAST_MIGRATION = 1481932800 # Dec. 17th, 2016 @@ -212,9 +208,12 @@ #MANAGER_ADDRESS = ('127.0.0.1', 5002) # could be used for CAPTCHA solving and live worker maps on remote systems # Store the cell IDs so that they don't have to be recalculated every visit. -# Enabling will increase memory usage. +# Enabling will (potentially drastically) increase memory usage. #CACHE_CELLS = False +# Only for use with web_sanic (requires PostgreSQL) +#DB = {'host': '127.0.0.1', 'user': 'monocle_role', 'password': 'pik4chu', 'port': '5432', 'database': 'monocle'} + # Disable to use Python's event loop even if uvloop is installed #UVLOOP = True @@ -250,7 +249,7 @@ #TZ_OFFSET = 0 # UTC offset in hours (if different from system time) # the required number of seconds remaining to notify about a Pokémon -TIME_REQUIRED = 600.0 # 10 minutes +TIME_REQUIRED = 600 # 10 minutes ### Only set either the NOTIFY_RANKING or NOTIFY_IDS, not both! # The (x) rarest Pokémon will be eligible for notification. Whether a @@ -286,7 +285,7 @@ # Pokémon scores are an average of the Pokémon's rarity score and IV score (from 0 to 1) # If NOTIFY_RANKING is 90, the 90th most common Pokémon will have a rarity of score 0, the rarest will be 1. # IV score is the IV sum divided by 45 (perfect IVs). 
-FULL_TIME = 1800.0 # the number of seconds after a notification when only MINIMUM_SCORE will be required +FULL_TIME = 1800 # the number of seconds after a notification when only MINIMUM_SCORE will be required INITIAL_SCORE = 0.7 # the required score immediately after a notification MINIMUM_SCORE = 0.4 # the required score after FULL_TIME seconds have passed @@ -314,8 +313,18 @@ ##### Referencing landmarks in your tweets/notifications -## Appended to OpenStreetMap queries if a query and query_suffix aren't provided -QUERY_SUFFIX = 'Salt Lake City' + +#### It is recommended to store the LANDMARKS object in a pickle to reduce startup +#### time if you are using queries. An example script for this is in: +#### scripts/pickle_landmarks.example.py +#from pickle import load +#with open('pickles/landmarks.pickle', 'rb') as f: +# LANDMARKS = load(f) + +### if you do pickle it, just load the pickle and omit everything below this point + +#from monocle.landmarks import Landmarks +#LANDMARKS = Landmarks(query_suffix=AREA_NAME) # Landmarks to reference when Pokémon are nearby # If no points are specified then it will query OpenStreetMap for the coordinates @@ -329,18 +338,20 @@ # When selecting a landmark, non-areas will be chosen first if any are close enough # the default phrase is 'in' for areas and 'at' for non-areas, but can be overriden for either. 
-''' # these triple quotes are block comments, remove them to fill in your own landmarks -LANDMARKS = ( - # since no points or query is provided, the names provided will be queried and suffixed with QUERY_SUFFIX - {'name': 'Rice Eccles Stadium', 'shortname': 'Rice Eccles', 'hashtags': ['Utes']}, - {'name': 'the Salt Lake Temple', 'shortname': 'the temple', 'hashtags': ['TempleSquare']}, - # provide two corner points to create a square for this area - {'name': 'City Creek Center', 'points': ((40.769210, -111.893901), (40.767231, -111.888275)), 'hashtags': ['CityCreek']}, - # provide a query that is different from the landmark name so that OpenStreetMap finds the correct one - {'name': 'the State Capitol', 'shortname': 'the Capitol', 'query': 'Utah State Capitol Building'}, - # query using name, override the default area phrase so that it says 'at (name)' instead of 'in' - {'name': 'the University of Utah', 'shortname': 'the U of U', 'hashtags': ['Utes'], 'phrase': 'at', 'is_area': True}, - # provide corner points to create a polygon of the area since OpenStreetMap does not have a shape for it - {'name': 'Yalecrest', 'points': ((40.7502, -111.8365), (40.7503, -111.8511), (40.7515, -111.8538), (40.7412, -111.8539), (40.7411, -111.8365)), 'is_area': True} -) -''' +### replace these with well-known places in your area + +## since no points or query is provided, the names provided will be queried and suffixed with AREA_NAME +#LANDMARKS.add('Rice Eccles Stadium', shortname='Rice Eccles', hashtags={'Utes'}) +#LANDMARKS.add('the Salt Lake Temple', shortname='the temple', hashtags={'TempleSquare'}) + +## provide two corner points to create a square for this area +#LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags={'CityCreek'}) + +## provide a query that is different from the landmark name so that OpenStreetMap finds the correct one +#LANDMARKS.add('the State Capitol', shortname='the Capitol', query='Utah State Capitol 
Building') + +### area examples ### +## query using name, override the default area phrase so that it says 'at (name)' instead of 'in' +#LANDMARKS.add('the University of Utah', shortname='the U of U', hashtags={'Utes'}, phrase='at', is_area=True) +## provide corner points to create a polygon of the area since OpenStreetMap does not have a shape for it +#LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True) diff --git a/monocle/__init__.py b/monocle/__init__.py index 71c61ba12..8e78de905 100644 --- a/monocle/__init__.py +++ b/monocle/__init__.py @@ -1,14 +1,5 @@ __title__ = 'monocle' -__version__ = '0.8b2' +__version__ = '0.8b1' __author__ = 'David Christenson' __license__ = 'MIT License' __copyright__ = 'Copyright (c) 2017 David Christenson ' - -from . import sanitized - -if sanitized.SPAWN_ID_INT: - from pogeo import cellid_to_location as spawnid_to_loc - from pogeo import cellid_to_coords as spawnid_to_coords -else: - from pogeo import token_to_location as spawnid_to_loc - from pogeo import token_to_coords as spawnid_to_coords diff --git a/monocle/altitudes.py b/monocle/altitudes.py index 36c99a83d..1fe3d5ff3 100644 --- a/monocle/altitudes.py +++ b/monocle/altitudes.py @@ -1,41 +1,167 @@ -from os.path import join +import sys -from pogeo.altitude import AltitudeCache +from asyncio import gather, CancelledError +from statistics import mean + +from aiohttp import ClientSession +from polyline import encode as polyencode +from aiopogo import json_loads +from cyrandom import uniform from . 
import bounds, sanitized as conf -from .shared import get_logger +from .shared import get_logger, LOOP, run_threaded +from .utils import dump_pickle, float_range, load_pickle, round_coords + +class Altitudes: + """Manage altitudes""" + __slots__ = ('altitudes', 'changed', 'fallback', 'log', 'mean') -log = get_logger('altitudes') + def __init__(self): + self.log = get_logger('altitudes') + self.changed = False + self.load() + if len(self.altitudes) > 5: + self.fallback = self.average + else: + self.fallback = self.random + async def get_all(self): + self.log.info('Fetching all altitudes') -ALTITUDES = AltitudeCache(conf.ALT_LEVEL, conf.GOOGLE_MAPS_KEY, conf.ALT_RANGE[0], conf.ALT_RANGE[1]) + coords = self.get_coords() -set_altitude = ALTITUDES.set_alt + async with ClientSession(loop=LOOP) as session: + if len(coords) < 300: + await self.fetch_alts(coords, session) + else: + tasks = [self.fetch_alts(chunk, session) + for chunk in self.chunks(coords)] + await gather(*tasks, loop=LOOP) + self.changed = True + LOOP.create_task(run_threaded(self.pickle)) + async def fetch_alts(self, coords, session, precision=conf.ALT_PRECISION): + try: + async with session.get( + 'https://maps.googleapis.com/maps/api/elevation/json', + params={'locations': 'enc:' + polyencode(coords), + 'key': conf.GOOGLE_MAPS_KEY}, + timeout=10) as resp: + response = await resp.json(loads=json_loads) + for r in response['results']: + coords = round_coords((r['location']['lat'], r['location']['lng']), precision) + self.altitudes[coords] = r['elevation'] + if not self.altitudes: + self.log.error(response['error_message']) + except Exception: + self.log.exception('Error fetching altitudes.') -def load_alts(): - pickle_path = join(conf.DIRECTORY, 'pickles', 'altcache.pickle') - try: - unpickled = ALTITUDES.unpickle(pickle_path, bounds) - except (FileNotFoundError, EOFError): - unpickled = False - except Exception: - unpickled = False - log.exception('Error while trying to unpickle altitudes.') + def 
get(self, point, randomize=uniform): + point = round_coords(point, conf.ALT_PRECISION) + alt = self.altitudes[point] + return randomize(alt - 2.5, alt + 2.5) - if not unpickled: + async def fetch(self, point, key=conf.GOOGLE_MAPS_KEY): + if not key: + return self.fallback() try: - ALTITUDES.fetch_all(bounds) + async with ClientSession(loop=LOOP) as session: + async with session.get( + 'https://maps.googleapis.com/maps/api/elevation/json', + params={'locations': '{0[0]},{0[1]}'.format(point), + 'key': key}, + timeout=10) as resp: + response = await resp.json(loads=json_loads) + altitude = response['results'][0]['elevation'] + self.altitudes[point] = altitude + self.changed = True + return altitude + except CancelledError: + raise except Exception: - log.exception('Error while fetching altitudes.') - if ALTITUDES: - log.warning('{} altitudes fetched.', len(ALTITUDES)) try: - ALTITUDES.pickle(pickle_path) - except Exception: - log.exception('Error while dumping altitude pickle.') - else: - log.warning('No altitudes fetched, will use random values within ALT_RANGE.') - global set_altitude - set_altitude = ALTITUDES.set_random + self.log.error(response['error_message']) + except (KeyError, NameError): + self.log.error('Error fetching altitude for {}.', point) + return self.fallback() + + def average(self, randomize=uniform): + self.log.info('Fell back to average altitude.') + try: + return randomize(self.mean - 15.0, self.mean + 15.0) + except AttributeError: + self.mean = mean(self.altitudes.values()) + return self.average() + + def random(self, alt_range=conf.ALT_RANGE, randomize=uniform): + self.log.info('Fell back to random altitude.') + return randomize(*conf.ALT_RANGE) + + def load(self): + try: + state = load_pickle('altitudes', raise_exception=True) + except FileNotFoundError: + self.log.info('No altitudes pickle found.') + self.altitudes = {} + LOOP.run_until_complete(self.get_all()) + return + + if state['bounds_hash'] == hash(bounds): + if state['precision'] 
== conf.ALT_PRECISION and state['altitudes']: + self.altitudes = state['altitudes'] + return + elif state['precision'] < conf.ALT_PRECISION: + self.altitudes = state['altitudes'] + LOOP.run_until_complete(self.get_all()) + return + elif state['precision'] <= conf.ALT_PRECISION: + pickled_alts = state['altitudes'] + + to_remove = [] + for coords in pickled_alts.keys(): + if coords not in bounds: + to_remove.append(coords) + for key in to_remove: + del pickled_alts[key] + + self.altitudes = pickled_alts + LOOP.run_until_complete(self.get_all()) + return + self.altitudes = {} + LOOP.run_until_complete(self.get_all()) + + def pickle(self): + if self.changed: + state = { + 'altitudes': self.altitudes, + 'precision': conf.ALT_PRECISION, + 'bounds_hash': hash(bounds) + } + dump_pickle('altitudes', state) + self.changed = False + + def get_coords(self, bounds=bounds, precision=conf.ALT_PRECISION): + coords = [] + if bounds.multi: + for b in bounds.polygons: + coords.extend(self.get_coords(b)) + return coords + step = 1 / (10 ** precision) + west, east = bounds.west, bounds.east + existing = self.altitudes.keys() if self.altitudes else False + for lat in float_range(bounds.south, bounds.north, step): + for lon in float_range(west, east, step): + point = lat, lon + if not existing or point not in existing: + coords.append(round_coords(point, precision)) + return coords + + @staticmethod + def chunks(l, n=300): + """Yield successive n-sized chunks from l.""" + for i in range(0, len(l), n): + yield l[i:i + n] + + +sys.modules[__name__] = Altitudes() diff --git a/monocle/bounds.py b/monocle/bounds.py index c7472c8a3..9a3117cd7 100644 --- a/monocle/bounds.py +++ b/monocle/bounds.py @@ -4,20 +4,91 @@ from .utils import get_distance -if conf.MULTI_BOUNDARIES: - from pogeo import Polygon +class Bounds: + def __init__(self): + self.north = max(conf.MAP_START[0], conf.MAP_END[0]) + self.south = min(conf.MAP_START[0], conf.MAP_END[0]) + self.east = max(conf.MAP_START[1], 
conf.MAP_END[1]) + self.west = min(conf.MAP_START[1], conf.MAP_END[1]) + self.center = ((self.north + self.south) / 2, + (self.west + self.east) / 2) + self.multi = False - sys.modules[__name__] = Polygon(conf.MULTI_BOUNDARIES, conf.HOLES) -elif conf.BOUNDARIES: - if conf.HOLES: - from pogeo import Polygon + def __bool__(self): + """Are boundaries a polygon?""" + return False - sys.modules[__name__] = Polygon(conf.BOUNDARIES, conf.HOLES) - else: - from pogeo import Loop + def __contains__(self, p): + return True - sys.modules[__name__] = Loop(conf.BOUNDARIES) -else: - from pogeo import Rectangle + def __hash__(self): + return 0 + + @property + def area(self): + """Returns the square kilometers for configured scan area""" + width = get_distance((self.center[0], self.west), (self.center[0], self.east), 2) + height = get_distance((self.south, 0), (self.north, 0), 2) + return round(width * height) + + +class PolyBounds(Bounds): + def __init__(self, polygon=conf.BOUNDARIES): + self.boundaries = prep(polygon) + self.south, self.west, self.north, self.east = polygon.bounds + self.center = polygon.centroid.coords[0] + self.multi = False + self.polygon = polygon + + def __bool__(self): + """Are boundaries a polygon?""" + return True + + def __contains__(self, p): + return self.boundaries.contains(Point(p)) + + def __hash__(self): + return hash((self.south, self.west, self.north, self.east)) + + +class MultiPolyBounds(PolyBounds): + def __init__(self): + super().__init__() + self.multi = True + self.polygons = [PolyBounds(polygon) for polygon in self.polygon] - sys.modules[__name__] = Rectangle(conf.MAP_START, conf.MAP_END, conf.STAY_WITHIN_MAP) + def __hash__(self): + return hash(tuple(hash(x) for x in self.polygons)) + + @property + def area(self): + return sum(x.area for x in self.polygons) + + +class RectBounds(Bounds): + def __contains__(self, p): + lat, lon = p + return (self.south <= lat <= self.north and + self.west <= lon <= self.east) + + def __hash__(self): + 
return hash((self.north, self.east, self.south, self.west)) + + +if conf.BOUNDARIES: + try: + from shapely.geometry import MultiPolygon, Point, Polygon + from shapely.prepared import prep + except ImportError as e: + raise ImportError('BOUNDARIES is set but shapely is not available.') from e + + if isinstance(conf.BOUNDARIES, Polygon): + sys.modules[__name__] = PolyBounds() + elif isinstance(conf.BOUNDARIES, MultiPolygon): + sys.modules[__name__] = MultiPolyBounds() + else: + raise TypeError('BOUNDARIES must be a shapely Polygon.') +elif conf.STAY_WITHIN_MAP: + sys.modules[__name__] = RectBounds() +else: + sys.modules[__name__] = Bounds() diff --git a/monocle/db.py b/monocle/db.py index 01b58e544..f1d1eeb28 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -3,19 +3,16 @@ from contextlib import contextmanager from enum import Enum from time import time, mktime -from hashlib import sha256 from sqlalchemy import Column, Integer, String, Float, SmallInteger, BigInteger, ForeignKey, UniqueConstraint, create_engine, cast, func, desc, asc, and_, exists from sqlalchemy.orm import sessionmaker, relationship from sqlalchemy.types import TypeDecorator, Numeric, Text from sqlalchemy.ext.declarative import declarative_base -from . import bounds, db_proc, spawns, spawnid_to_coords, spawnid_to_loc, sanitized as conf +from . 
import bounds, spawns, db_proc, sanitized as conf from .utils import time_until_time, dump_pickle, load_pickle from .shared import call_at, get_logger -contains_spawn = bounds.contains_cellid if conf.SPAWN_ID_INT else bounds.contains_token - try: assert conf.LAST_MIGRATION < time() except AssertionError: @@ -57,7 +54,7 @@ class TextInt(TypeDecorator): impl = Text def process_bind_param(self, value, dialect): - return repr(value) + return str(value) def process_result_value(self, value, dialect): return int(value) @@ -69,8 +66,6 @@ def process_result_value(self, value, dialect): ID_TYPE = BigInteger if conf.SPAWN_ID_INT else String(11) -DB_HASH = sha256(conf.DB_ENGINE.encode('utf-8')).digest() - class Team(Enum): none = 0 @@ -182,7 +177,7 @@ def __contains__(self, sighting): def pickle(self): state = self.__dict__.copy() - state['db_hash'] = DB_HASH + state['db_hash'] = spawns.db_hash state['bounds_hash'] = hash(bounds) dump_pickle('forts', state) @@ -190,7 +185,7 @@ def unpickle(self): try: state = load_pickle('forts', raise_exception=True) if all((state['class_version'] == self.class_version, - state['db_hash'] == DB_HASH, + state['db_hash'] == spawns.db_hash, state['bounds_hash'] == hash(bounds))): self.__dict__.update(state) except (FileNotFoundError, TypeError, KeyError): @@ -207,8 +202,9 @@ def unpickle(self): Session = sessionmaker(bind=_engine) DB_TYPE = _engine.name + if conf.REPORT_SINCE: - SINCE_TIME = int(mktime(conf.REPORT_SINCE.timetuple())) + SINCE_TIME = mktime(conf.REPORT_SINCE.timetuple()) SINCE_QUERY = 'WHERE expire_timestamp > {}'.format(SINCE_TIME) else: SINCE_QUERY = '' @@ -222,6 +218,8 @@ class Sighting(Base): spawn_id = Column(ID_TYPE) expire_timestamp = Column(Integer, index=True) encounter_id = Column(HUGE_TYPE, index=True) + lat = Column(FLOAT_TYPE) + lon = Column(FLOAT_TYPE) atk_iv = Column(TINY_TYPE) def_iv = Column(TINY_TYPE) sta_iv = Column(TINY_TYPE) @@ -244,6 +242,8 @@ class Mystery(Base): pokemon_id = Column(TINY_TYPE) spawn_id = 
Column(ID_TYPE, index=True) encounter_id = Column(HUGE_TYPE, index=True) + lat = Column(FLOAT_TYPE) + lon = Column(FLOAT_TYPE) first_seen = Column(Integer, index=True) first_seconds = Column(SmallInteger) last_seconds = Column(SmallInteger) @@ -269,6 +269,8 @@ class Spawnpoint(Base): id = Column(Integer, primary_key=True) spawn_id = Column(ID_TYPE, unique=True, index=True) despawn_time = Column(SmallInteger, index=True) + lat = Column(FLOAT_TYPE) + lon = Column(FLOAT_TYPE) updated = Column(Integer, index=True) duration = Column(TINY_TYPE) failures = Column(TINY_TYPE) @@ -324,40 +326,38 @@ def session_scope(autoflush=False): try: yield session session.commit() - except Exception: + except: session.rollback() raise finally: session.close() -def add_sighting(session, pokemon, _cache=SIGHTING_CACHE, _encounter=conf.ENCOUNTER): +def add_sighting(session, pokemon): # Check if there isn't the same entry already - if pokemon in _cache: + if pokemon in SIGHTING_CACHE: return if session.query(exists().where(and_( Sighting.expire_timestamp == pokemon['expire_timestamp'], Sighting.encounter_id == pokemon['encounter_id'])) ).scalar(): - _cache.add(pokemon) + SIGHTING_CACHE.add(pokemon) return obj = Sighting( pokemon_id=pokemon['pokemon_id'], spawn_id=pokemon['spawn_id'], encounter_id=pokemon['encounter_id'], expire_timestamp=pokemon['expire_timestamp'], + lat=pokemon['lat'], + lon=pokemon['lon'], + atk_iv=pokemon.get('individual_attack'), + def_iv=pokemon.get('individual_defense'), + sta_iv=pokemon.get('individual_stamina'), + move_1=pokemon.get('move_1'), + move_2=pokemon.get('move_2') ) - if _encounter: - try: - obj.atk_iv = pokemon['individual_attack'] - obj.def_iv = pokemon['individual_defense'] - obj.sta_iv = pokemon['individual_stamina'] - obj.move_1 = pokemon['move_1'] - obj.move_2 = pokemon['move_2'] - except KeyError: - pass session.add(obj) - _cache.add(pokemon) + SIGHTING_CACHE.add(pokemon) def add_spawnpoint(session, pokemon): @@ -373,7 +373,8 @@ def 
add_spawnpoint(session, pokemon): .filter(Spawnpoint.spawn_id == spawn_id) \ .first() now = round(time()) - spawns.add_known(spawn_id, new_time) + point = pokemon['lat'], pokemon['lon'] + spawns.add_known(spawn_id, new_time, point) if existing: existing.updated = now existing.failures = 0 @@ -395,6 +396,8 @@ def add_spawnpoint(session, pokemon): session.add(Spawnpoint( spawn_id=spawn_id, despawn_time=new_time, + lat=pokemon['lat'], + lon=pokemon['lon'], updated=now, duration=duration, failures=0 @@ -404,22 +407,27 @@ def add_spawnpoint(session, pokemon): def add_mystery_spawnpoint(session, pokemon): # Check if the same entry already exists spawn_id = pokemon['spawn_id'] - if spawn_id in spawns.unknown or session.query(exists().where( + point = pokemon['lat'], pokemon['lon'] + if point in spawns.unknown or session.query(exists().where( Spawnpoint.spawn_id == spawn_id)).scalar(): return session.add(Spawnpoint( spawn_id=spawn_id, + despawn_time=None, + lat=pokemon['lat'], + lon=pokemon['lon'], updated=0, + duration=None, failures=0 )) - if spawnid_to_loc(spawn_id) in bounds: - spawns.unknown.add(spawn_id) + if point in bounds: + spawns.add_unknown(point) -def add_mystery(session, pokemon, _cache=MYSTERY_CACHE, _encounter=conf.ENCOUNTER): - if pokemon in _cache: +def add_mystery(session, pokemon): + if pokemon in MYSTERY_CACHE: return add_mystery_spawnpoint(session, pokemon) existing = session.query(Mystery) \ @@ -435,22 +443,20 @@ def add_mystery(session, pokemon, _cache=MYSTERY_CACHE, _encounter=conf.ENCOUNTE pokemon_id=pokemon['pokemon_id'], spawn_id=pokemon['spawn_id'], encounter_id=pokemon['encounter_id'], + lat=pokemon['lat'], + lon=pokemon['lon'], first_seen=pokemon['seen'], first_seconds=seconds, last_seconds=seconds, - seen_range=0 + seen_range=0, + atk_iv=pokemon.get('individual_attack'), + def_iv=pokemon.get('individual_defense'), + sta_iv=pokemon.get('individual_stamina'), + move_1=pokemon.get('move_1'), + move_2=pokemon.get('move_2') ) - if _encounter: - 
try: - obj.atk_iv = pokemon['individual_attack'] - obj.def_iv = pokemon['individual_defense'] - obj.sta_iv = pokemon['individual_stamina'] - obj.move_1 = pokemon['move_1'] - obj.move_2 = pokemon['move_2'] - except KeyError: - pass session.add(obj) - _cache.add(pokemon) + MYSTERY_CACHE.add(pokemon) def add_fort_sighting(session, raw_fort): @@ -705,7 +711,7 @@ def get_rare_pokemon(session): result = [] for pokemon_id in conf.RARE_IDS: - query = session.query(Sighting.id) \ + query = session.query(Sighting) \ .filter(Sighting.pokemon_id == pokemon_id) if conf.REPORT_SINCE: query = query.filter(Sighting.expire_timestamp > SINCE_TIME) @@ -725,7 +731,8 @@ def get_nonexistent_pokemon(session): def get_all_sightings(session, pokemon_ids): - query = session.query(Sighting.pokemon_id, Sighting.spawn_id) \ + # TODO: rename this and get_sightings + query = session.query(Sighting) \ .filter(Sighting.pokemon_id.in_(pokemon_ids)) if conf.REPORT_SINCE: query = query.filter(Sighting.expire_timestamp > SINCE_TIME) @@ -768,7 +775,7 @@ def get_spawns_per_hour(session, pokemon_id): def get_total_spawns_count(session, pokemon_id): - query = session.query(Sighting.id) \ + query = session.query(Sighting) \ .filter(Sighting.pokemon_id == pokemon_id) if conf.REPORT_SINCE: query = query.filter(Sighting.expire_timestamp > SINCE_TIME) @@ -776,9 +783,9 @@ def get_total_spawns_count(session, pokemon_id): def get_all_spawn_coords(session, pokemon_id=None): - points = session.query(Sighting.spawn_id) + points = session.query(Sighting.lat, Sighting.lon) if pokemon_id: points = points.filter(Sighting.pokemon_id == int(pokemon_id)) if conf.REPORT_SINCE: points = points.filter(Sighting.expire_timestamp > SINCE_TIME) - return [spawnid_to_coords(x[0]) for x in points] + return points.all() diff --git a/monocle/landmarks.py b/monocle/landmarks.py index 915724fac..3d3ae640c 100644 --- a/monocle/landmarks.py +++ b/monocle/landmarks.py @@ -1,11 +1,16 @@ -from hashlib import sha256 from logging import 
getLogger -from aiopogo import json_dumps -from pogeo import Location, Loop, Rectangle -from pogeo.geocoder import geocode +from shapely.geometry import Point, Polygon, shape, box, LineString +from shapely import speedups +from geopy import Nominatim +from pogeo import get_distance -from .utils import dump_pickle, load_pickle +if speedups.available: + speedups.enable() + + +class FailedQuery(Exception): + """Raised when no location is found.""" class Landmark: @@ -21,23 +26,23 @@ def __init__(self, name, shortname=None, points=None, query=None, if not points and not query: query = name.lstrip('the ') - # append query suffix if it's not already present in query if ((query_suffix and query) and query_suffix.lower() not in query.lower()): query = '{} {}'.format(query, query_suffix) self.location = None if query: - self.location = geocode(query, self.log) + self.query_location(query) elif points: try: length = len(points) if length > 2: - self.location = Loop(points) + self.location = Polygon(points) elif length == 2: - self.location = Rectangle(*points) + self.location = box(points[0][0], points[0][1], + points[1][0], points[1][1]) elif length == 1: - self.location = Location(*points[0]) + self.location = Point(*points[0]) except TypeError: raise ValueError('points must be a list/tuple of lists/tuples' ' containing 2 coordinates each') @@ -45,9 +50,13 @@ def __init__(self, name, shortname=None, points=None, query=None, if not self.location: raise ValueError('No location provided for {}. 
Must provide' ' either points, or query.'.format(self.name)) + elif not isinstance(self.location, (Point, Polygon, LineString)): + raise NotImplementedError('{} is a {} which is not supported' + .format(self.name, self.location.type)) + self.south, self.west, self.north, self.east = self.location.bounds - # square kilometers - self.size = self.location.area + # very imprecise conversion to square meters + self.size = self.location.area * 12100000000 if phrase: self.phrase = phrase @@ -58,87 +67,155 @@ def __init__(self, name, shortname=None, points=None, query=None, self.hashtags = hashtags - def __repr__(self): - center = self.location if isinstance(self.location, Location) else self.location.center - return ''.format(self.name, center, type(self.location), self.size) - - def __contains__(self, loc): + def __contains__(self, coordinates): """determine if a point is within this object range""" - return loc in self.location + lat, lon = coordinates + if (self.south <= lat <= self.north and + self.west <= lon <= self.east): + return self.location.contains(Point(lat, lon)) + return False + + def query_location(self, query): + def swap_coords(geojson): + out = [] + for x in geojson: + if isinstance(x, list): + out.append(swap_coords(x)) + else: + return geojson[1], geojson[0] + return out + + nom = Nominatim() + try: + geo = nom.geocode(query=query, geometry='geojson', timeout=3).raw + geojson = geo['geojson'] + except (AttributeError, KeyError): + raise FailedQuery('Query for {} did not return results.'.format(query)) + self.log.info('Nominatim returned {} for {}'.format(geo['display_name'], query)) + geojson['coordinates'] = swap_coords(geojson['coordinates']) + self.location = shape(geojson) + + def get_coordinates(self): + if isinstance(self.location, Polygon): + return tuple(self.location.exterior.coordinates) + else: + return self.location.coords[0] - def generate_string(self, loc): - if loc in self.location: + def generate_string(self, coordinates): + if 
coordinates in self: return '{} {}'.format(self.phrase, self.name) - distance = self.location.distance(loc) + distance = self.distance_from_point(coordinates) if distance < 50 or (self.is_area and distance < 100): return '{} {}'.format(self.phrase, self.name) else: return '{:.0f} meters from {}'.format(distance, self.name) + def distance_from_point(self, coordinates): + point = Point(*coordinates) + if isinstance(self.location, Point): + nearest = self.location + else: + nearest = self.nearest_point(point) + return get_distance(coordinates, nearest.coords[0]) -class Landmarks: - __slots__ = ('points_of_interest', 'areas') - - def __init__(self, landmarks, query_suffix): - self.areas = [] - self.points_of_interest = [] - - sha = sha256( - json_dumps(landmarks, - ensure_ascii=False, - sort_keys=True).encode('utf-8') - ).digest() - - if not self.unpickle(sha): - for kwargs in landmarks: - if 'query_suffix' not in kwargs and 'query' not in kwargs: - kwargs['query_suffix'] = query_suffix - - landmark = Landmark(**kwargs) - if landmark.is_area: - self.areas.append(landmark) - else: - self.points_of_interest.append(landmark) + def nearest_point(self, point): + '''Find nearest point in geometry, measured from given point.''' + if isinstance(self.location, Polygon): + segs = self.pairs(self.location.exterior.coords) + elif isinstance(self.location, LineString): + segs = self.pairs(self.location.coords) + else: + raise NotImplementedError('project_point_to_object not implemented' + "for geometry type '{}'.".format( + self.location.type)) - self.pickle(sha) + nearest_point = None + min_dist = float("inf") - def __bool__(self): - return bool(self.points_of_interest or self.areas) + for seg_start, seg_end in segs: + line_start = Point(seg_start) + line_end = Point(seg_end) - def pickle(self, sha): - dump_pickle('landmarks', { - 'areas': self.areas, - 'points_of_interest': self.points_of_interest, - 'sha': sha}) + intersection_point = self.project_point_to_line( + point, 
line_start, line_end) + cur_dist = point.distance(intersection_point) - def unpickle(self, sha): - try: - pickled = load_pickle('landmarks', raise_exception=True) - if sha == pickled['sha']: - self.areas = pickled['areas'] - self.points_of_interest = pickled['points_of_interest'] - return True + if cur_dist < min_dist: + min_dist = cur_dist + nearest_point = intersection_point + return nearest_point + + @staticmethod + def pairs(lst): + """Iterate over a list in overlapping pairs.""" + i = iter(lst) + prev = next(i) + for item in i: + yield prev, item + prev = item + + @staticmethod + def project_point_to_line(point, line_start, line_end): + '''Find nearest point on a straight line, + measured from given point.''' + line_magnitude = line_start.distance(line_end) + + u = (((point.x - line_start.x) * (line_end.x - line_start.x) + + (point.y - line_start.y) * (line_end.y - line_start.y)) + / (line_magnitude ** 2)) + + # closest point does not fall within the line segment, + # take the shorter distance to an endpoint + if u < 0.00001 or u > 1: + ix = point.distance(line_start) + iy = point.distance(line_end) + if ix > iy: + return line_end else: - return False - except (FileNotFoundError, KeyError): - return False + return line_start + else: + ix = line_start.x + u * (line_end.x - line_start.x) + iy = line_start.y + u * (line_end.y - line_start.y) + return Point([ix, iy]) + + +class Landmarks: + + def __init__(self, query_suffix=None): + self.points_of_interest = set() + self.areas = set() + self.query_suffix = query_suffix + + def add(self, *args, **kwargs): + if ('query_suffix' not in kwargs) and self.query_suffix and ( + 'query' not in kwargs): + kwargs['query_suffix'] = self.query_suffix + landmark = Landmark(*args, **kwargs) + if landmark.is_area: + self.areas.add(landmark) + else: + self.points_of_interest.add(landmark) + if landmark.size < 1: + print(landmark.name, type(landmark.location), '\n') + else: + print(landmark.name, landmark.size, 
type(landmark.location), '\n') def find_landmark(self, coords, max_distance=750): - landmark = self.find_within(self.points_of_interest, coords) + landmark = find_within(self.points_of_interest, coords) if landmark: return landmark - landmark, distance = self.find_closest(self.points_of_interest, coords) + landmark, distance = find_closest(self.points_of_interest, coords) try: if distance < max_distance: return landmark except TypeError: pass - area = self.find_within(self.areas, coords) + area = find_within(self.areas, coords) if area: return area - area, area_distance = self.find_closest(self.areas, coords) + area, area_distance = find_closest(self.areas, coords) try: if area and area_distance < distance: @@ -148,37 +225,37 @@ def find_landmark(self, coords, max_distance=750): except TypeError: return area - @staticmethod - def find_within(landmarks, coordinates): - within = [landmark for landmark in landmarks if coordinates in landmark] - found = len(within) - if found == 1: - return within[0] - if found: - landmarks = iter(within) - smallest = next(landmarks) - smallest_size = landmark.size - for landmark in landmarks: - if landmark.size < smallest_size: - smallest = landmark - smallest_size = landmark.size - return smallest - return None - @staticmethod - def find_closest(landmarks, coordinates): - landmarks = iter(landmarks) - try: - closest_landmark = next(landmarks) - except StopIteration: - return None, None - shortest_distance = closest_landmark.location.distance(coordinates) +def find_within(landmarks, coordinates): + within = [landmark for landmark in landmarks if coordinates in landmark] + found = len(within) + if found == 1: + return within[0] + if found: + landmarks = iter(within) + smallest = next(landmarks) + smallest_size = landmark.size for landmark in landmarks: - distance = landmark.location.distance(coordinates) - if distance <= shortest_distance: - if (distance == shortest_distance - and landmark.size > closest_landmark.size): - continue - 
shortest_distance = distance - closest_landmark = landmark - return closest_landmark, shortest_distance + if landmark.size < smallest_size: + smallest = landmark + smallest_size = landmark.size + return smallest + return None + + +def find_closest(landmarks, coordinates): + landmarks = iter(landmarks) + try: + closest_landmark = next(landmarks) + except StopIteration: + return None, None + shortest_distance = closest_landmark.distance_from_point(coordinates) + for landmark in landmarks: + distance = landmark.distance_from_point(coordinates) + if distance <= shortest_distance: + if (distance == shortest_distance + and landmark.size > closest_landmark.size): + continue + shortest_distance = distance + closest_landmark = landmark + return closest_landmark, shortest_distance diff --git a/monocle/names.py b/monocle/names.py index 1851d2297..b84625685 100644 --- a/monocle/names.py +++ b/monocle/names.py @@ -4,7 +4,7 @@ language = conf.LANGUAGE.upper()[:2] -POKEMON = { +POKEMON = defaultdict(lambda: '?', { 1: 'Bulbasaur', 2: 'Ivysaur', 3: 'Venusaur', @@ -256,7 +256,7 @@ 249: 'Lugia', 250: 'Ho-Oh', 251: 'Celebi' -} +}) MOVES = defaultdict(lambda: '?', { 1: 'Thunder Shock', @@ -483,7 +483,7 @@ if language == 'EN': pass elif language == 'DE': - POKEMON = { + POKEMON = defaultdict(lambda: '?', { 1: 'Bisasam', 2: 'Bisaknosp', 3: 'Bisaflor', @@ -735,7 +735,7 @@ 249: 'Lugia', 250: 'Ho-Oh', 251: 'Celebi' - } + }) MOVES = defaultdict(lambda: '?', { 1: 'Donnerschock', @@ -959,7 +959,7 @@ 281: 'Kraftreserve' }) elif language == 'FR': - POKEMON = { + POKEMON = defaultdict(lambda: '?', { 1: 'Bulbizarre', 2: 'Herbizarre', 3: 'Florizarre', @@ -1211,7 +1211,7 @@ 249: 'Lugia', 250: 'Ho-Oh', 251: 'Celebi' - } + }) MOVES = defaultdict(lambda: '?', { 1: 'Éclair', @@ -1435,7 +1435,7 @@ 281: 'Puissance Cachée' }) elif language == 'ZH': - POKEMON = { + POKEMON = defaultdict(lambda: '?', { 1: '妙蛙种子', 2: '妙蛙草', 3: '妙蛙花', @@ -1687,7 +1687,7 @@ 249: '洛奇亚', 250: '凤王', 251: '雪拉比' - } + }) MOVES = 
defaultdict(lambda: '?', { 1: '電擊', @@ -1911,7 +1911,7 @@ 281: '覺醒力量' }) elif language == 'JA': - POKEMON = { + POKEMON = defaultdict(lambda: '?', { 1: 'フシギダネ', 2: 'フシギソウ', 3: 'フシギバナ', @@ -2163,7 +2163,7 @@ 249: 'ルギア', 250: 'ホウオウ', 251: 'セレビィ' - } + }) MOVES = defaultdict(lambda: '?', { 1: 'でんきショック', @@ -2833,7 +2833,7 @@ 281: 'Introforza' }) elif language == 'KO': - POKEMON = { + POKEMON = defaultdict(lambda: '?', { 1: '이상해씨', 2: '이상해풀', 3: '이상해꽃', @@ -3085,7 +3085,7 @@ 249: '루기아', 250: '칠색조', 251: '세레비' - } + }) MOVES = defaultdict(lambda: '?', { 1: '전기쇼크', diff --git a/monocle/notification.py b/monocle/notification.py index f96ed637c..9bb2a77b4 100755 --- a/monocle/notification.py +++ b/monocle/notification.py @@ -8,14 +8,12 @@ from aiohttp import ClientError, ClientResponseError, ServerTimeoutError from aiopogo import json_dumps, json_loads -from pogeo import Location -from . import sanitized as conf +from .utils import load_pickle, dump_pickle from .db import session_scope, get_pokemon_ranking, estimate_remaining_time -from .landmarks import Landmarks from .names import MOVES, POKEMON from .shared import get_logger, SessionManager, LOOP, run_threaded -from .utils import load_pickle, dump_pickle +from . 
import sanitized as conf WEBHOOK = False @@ -228,13 +226,10 @@ def draw_name(self, pos, font=conf.NAME_FONT): class Notification: - landmarks = (Landmarks(conf.LANDMARKS, conf.QUERY_SUFFIX) - if conf.LANDMARKS and (TWITTER or PUSHBULLET) else None) - def __init__(self, pokemon, score, time_of_day): self.pokemon = pokemon self.name = POKEMON[pokemon['pokemon_id']] - self.coordinates = Location(pokemon['lat'], pokemon['lon']) + self.coordinates = pokemon['lat'], pokemon['lon'] self.score = score self.time_of_day = time_of_day self.log = get_logger('notifier') @@ -262,6 +257,11 @@ def __init__(self, pokemon, score, time_of_day): _tz = None now = datetime.fromtimestamp(pokemon['seen'], _tz) + if TWITTER and conf.HASHTAGS: + self.hashtags = conf.HASHTAGS.copy() + else: + self.hashtags = set() + # check if expiration time is known, or a range try: self.tth = pokemon['time_till_hidden'] @@ -289,19 +289,18 @@ def __init__(self, pokemon, score, time_of_day): now + max_delta).strftime('%I:%M %p').lstrip('0') self.map_link = 'https://maps.google.com/maps?q={0[0]:.5f},{0[1]:.5f}'.format(self.coordinates) + self.place = None async def notify(self): - if self.landmarks: - self.landmark = self.landmarks.find_landmark(self.coordinates) + if conf.LANDMARKS and (TWITTER or PUSHBULLET): + self.landmark = conf.LANDMARKS.find_landmark(self.coordinates) + + try: self.place = self.landmark.generate_string(self.coordinates) - if TWITTER: - self.hashtags = conf.HASHTAGS.copy() if conf.HASHTAGS else set() - if self.landmark.hashtags: - self.hashtags.update(self.landmark.hashtags) - else: + if TWITTER and self.landmark.hashtags: + self.hashtags.update(self.landmark.hashtags) + except AttributeError: self.place = self.generic_place_string() - if TWITTER: - self.hashtags = conf.HASHTAGS.copy() if conf.HASHTAGS else set() if PUSHBULLET or TELEGRAM: try: diff --git a/monocle/overseer.py b/monocle/overseer.py index c1b5643cd..8293f1c27 100755 --- a/monocle/overseer.py +++ b/monocle/overseer.py @@ 
-8,16 +8,14 @@ from time import time, monotonic from aiopogo import HashServer -from pogeo import diagonal_distance, level_edge from sqlalchemy.exc import OperationalError from .db import SIGHTING_CACHE, MYSTERY_CACHE -from .utils import get_current_hour, dump_pickle, get_start_coords, best_factors, percentage_split -from .shared import ACCOUNTS, get_logger, LOOP, run_threaded -from . import bounds, db_proc, spawnid_to_loc, spawns, sanitized as conf +from .utils import get_current_hour, dump_pickle, get_start_coords, get_bootstrap_points, randomize_point, best_factors, percentage_split +from .shared import get_logger, LOOP, run_threaded, ACCOUNTS +from . import bounds, db_proc, spawns, sanitized as conf from .worker import Worker - ANSI = '\x1b[2J\x1b[H' if platform == 'win32': try: @@ -33,7 +31,7 @@ from os import system ANSI = '' -BAD_STATUSES = { +BAD_STATUSES = ( 'FAILED LOGIN', 'EXCEPTION', 'NOT AUTHENTICATED', @@ -53,24 +51,7 @@ 'HASHING ERROR', 'PROXY ERROR', 'TIMEOUT' -} - -_unit = conf.SPEED_UNIT.lower() -if _unit == 'miles': - # miles/hour to meters/second, default to 19.5mph - SPEED_LIMIT = conf.SPEED_LIMIT * 0.44704 if conf.SPEED_LIMIT else 8.71728 - GOOD_ENOUGH = conf.GOOD_ENOUGH * 0.44704 if conf.GOOD_ENOUGH else 0.44704 -elif _unit == 'kilometers': - # kilometers/hour to meters/second, default to 31.38km/h - SPEED_LIMIT = conf.SPEED_LIMIT * 1000 / 3600 if conf.SPEED_LIMIT else 8.71728 - GOOD_ENOUGH = conf.GOOD_ENOUGH * 1000 / 3600 if conf.GOOD_ENOUGH else 0.44704 -elif _unit == 'meters': - # meters/hour to meters/second - SPEED_LIMIT = conf.SPEED_LIMIT / 3600 if conf.SPEED_LIMIT else 8.71728 - GOOD_ENOUGH = conf.GOOD_ENOUGH / 3600 if conf.GOOD_ENOUGH else 0.44704 -else: - raise ValueError("Valid speed units are: 'miles', 'kilometers', and 'meters'") -del _unit +) class Overseer: @@ -189,11 +170,11 @@ def update_stats(self, refresh=conf.STAT_REFRESH, med=median, count=conf.GRID[0] self.update_coroutines_count() self.counts = ( - 'Known spawns: {}, 
unknown: {}\n' + 'Known spawns: {}, unknown: {}, more: {}\n' '{} workers, {} coroutines\n' 'sightings cache: {}, mystery cache: {}, DB queue: {}\n' ).format( - len(spawns), len(spawns.unknown), + len(spawns), len(spawns.unknown), spawns.cells_count, count, self.coroutines_count, len(SIGHTING_CACHE), len(MYSTERY_CACHE), len(db_proc) ) @@ -268,8 +249,8 @@ def _print_status(self, _ansi=ANSI, _start=datetime.now(), _notify=conf.NOTIFY): ] try: - seen = Worker.seen - captchas = Worker.captchas + seen = Worker.g['seen'] + captchas = Worker.g['captchas'] output.append('Seen per visit: {v:.2f}, per minute: {m:.0f}'.format( v=seen / self.visits, m=seen / (seconds_since_start / 60))) @@ -327,7 +308,7 @@ def longest_running(self): if w.start_time < earliest: worker = w earliest = w.start_time - minutes = (monotonic() - earliest) / 60.0 + minutes = ((time() * 1000) - earliest) / 60000 return worker, minutes def get_start_point(self): @@ -335,12 +316,12 @@ def get_start_point(self): now = time() % 3600 closest = None - for spawn_id, spawn_time in spawns.known.items(): + for spawn_id, spawn_time in spawns.known.values(): time_diff = now - spawn_time - if 0.0 < time_diff < smallest_diff: + if 0 < time_diff < smallest_diff: smallest_diff = time_diff closest = spawn_id - if smallest_diff < 3.0: + if smallest_diff < 3: break return closest @@ -349,7 +330,6 @@ async def update_spawns(self, initial=False): try: await run_threaded(spawns.update) LOOP.create_task(run_threaded(spawns.pickle)) - return except OperationalError as e: self.log.exception('Operational error while trying to update spawns.') if initial: @@ -360,6 +340,8 @@ async def update_spawns(self, initial=False): except Exception as e: self.log.exception('A wild {} appeared while updating spawns!', e.__class__.__name__) await sleep(15, loop=LOOP) + else: + break async def launch(self, bootstrap, pickle): exceptions = 0 @@ -376,7 +358,7 @@ async def launch(self, bootstrap, pickle): return update_spawns = False - 
self.mysteries = iter(spawns.unknown.copy()) + self.mysteries = spawns.mystery_gen() while True: try: await self._launch(update_spawns) @@ -401,7 +383,7 @@ async def _launch(self, update_spawns): start_point = self.get_start_point() if start_point and not spawns.after_last(): spawns_iter = dropwhile( - lambda s: s[0] != start_point, spawns.items()) + lambda s: s[1][0] != start_point, spawns.items()) else: spawns_iter = iter(spawns.items()) @@ -411,7 +393,7 @@ async def _launch(self, update_spawns): captcha_limit = conf.MAX_CAPTCHAS skip_spawn = conf.SKIP_SPAWN - for spawn_id, spawn_seconds in spawns_iter: + for point, (spawn_id, spawn_seconds) in spawns_iter: try: if self.captcha_queue.qsize() > captcha_limit: self.paused = True @@ -426,15 +408,15 @@ async def _launch(self, update_spawns): # positive = already happened time_diff = time() - spawn_time - while time_diff < 0.4: + while time_diff < 0.5: try: - mystery_id = next(self.mysteries) + mystery_point = next(self.mysteries) await self.coroutine_semaphore.acquire() - LOOP.create_task(self.try_spawn(mystery_id, skip_time=conf.GIVE_UP_UNKNOWN)) + LOOP.create_task(self.try_point(mystery_point)) except StopIteration: if self.next_mystery_reload < monotonic(): - self.mysteries = iter(spawns.unknown.copy()) + self.mysteries = spawns.mystery_gen() self.next_mystery_reload = monotonic() + conf.RESCAN_UNKNOWN else: await sleep(min(spawn_time - time() + .5, self.next_mystery_reload - monotonic()), loop=LOOP) @@ -448,13 +430,13 @@ async def _launch(self, update_spawns): continue await self.coroutine_semaphore.acquire() - LOOP.create_task(self.try_spawn(spawn_id, spawn_time)) + LOOP.create_task(self.try_point(point, spawn_time, spawn_id)) async def try_again(self, point): async with self.coroutine_semaphore: worker = await self.best_worker(point, False) async with worker.busy: - if await worker.visit_point(point): + if await worker.visit(point): self.visits += 1 async def bootstrap(self): @@ -477,7 +459,7 @@ async def 
bootstrap(self): self.log.warning('Starting bootstrap phase 3.') unknowns = list(spawns.unknown) shuffle(unknowns) - tasks = (self.try_again(point) for point in map(spawnid_to_loc, unknowns)) + tasks = (self.try_again(point) for point in unknowns) await gather(*tasks, loop=LOOP) self.log.warning('Finished bootstrapping.') @@ -506,25 +488,22 @@ async def visit_release(worker, num, *args): async def bootstrap_two(self): async def bootstrap_try(point): async with self.coroutine_semaphore: - point.jitter(lat_amount, lon_amount) - LOOP.call_later(1790, LOOP.create_task, self.try_again(point)) + randomized = randomize_point(point, randomization) + LOOP.call_later(1790, LOOP.create_task, self.try_again(randomized)) worker = await self.best_worker(point, False) async with worker.busy: self.visits += await worker.bootstrap_visit(point) # randomize to within ~140m of the nearest neighbor on the second visit - if conf.BOOTSTRAP_LEVEL < 17: - lat_amount, lon_amount = diagonal_distance(bounds.center, level_edge(conf.BOOSTRAP_LEVEL) - 140.0) - else: - lat_amount = lon_amount = 0.0 - tasks = (bootstrap_try(p) for p in bounds.get_points(bootstrap_level)) + randomization = conf.BOOTSTRAP_RADIUS / 155555 - 0.00045 + tasks = (bootstrap_try(x) for x in get_bootstrap_points(bounds)) await gather(*tasks, loop=LOOP) - async def try_spawn(self, spawn_id, spawn_time=None, skip_time=conf.GIVE_UP_KNOWN, _jitter = diagonal_distance(bounds.center, 50.0 if conf.ENCOUNTER else 65.0)): + async def try_point(self, point, spawn_time=None, spawn_id=None): try: - location = spawnid_to_loc(spawn_id) - location.jitter(*_jitter) - worker = await self.best_worker(location, monotonic() + skip_time) + point = randomize_point(point) + skip_time = monotonic() + (conf.GIVE_UP_KNOWN if spawn_time else conf.GIVE_UP_UNKNOWN) + worker = await self.best_worker(point, skip_time) if not worker: if spawn_time: self.skipped += 1 @@ -533,36 +512,34 @@ async def try_spawn(self, spawn_id, spawn_time=None, 
skip_time=conf.GIVE_UP_KNOW if spawn_time: worker.after_spawn = time() - spawn_time - if await worker.visit_point(location, spawn_id): + if await worker.visit(point, spawn_id): self.visits += 1 except CancelledError: raise except Exception: - self.log.exception('An exception occurred in try_spawn') + self.log.exception('An exception occurred in try_point') finally: self.coroutine_semaphore.release() - async def best_worker(self, location, skip_time, _enough=GOOD_ENOUGH, _limit=SPEED_LIMIT): + async def best_worker(self, point, skip_time): + good_enough = conf.GOOD_ENOUGH while self.running: gen = (w for w in self.workers if not w.busy.locked()) try: worker = next(gen) - current_time = time() - lowest_speed = worker.location.speed_with_time(location, current_time) + lowest_speed = worker.travel_speed(point) except StopIteration: - pass - else: - for w in gen: - speed = w.location.speed_with_time(location, current_time) - if speed < lowest_speed: - if speed <= _enough: - w.speed = speed - return w - lowest_speed = speed - worker = w - if lowest_speed <= _limit: - worker.speed = lowest_speed - return worker + lowest_speed = float('inf') + for w in gen: + speed = w.travel_speed(point) + if speed < lowest_speed: + lowest_speed = speed + worker = w + if speed < good_enough: + break + if lowest_speed < conf.SPEED_LIMIT: + worker.speed = lowest_speed + return worker if skip_time and monotonic() > skip_time: return None await sleep(conf.SEARCH_SLEEP, loop=LOOP) @@ -570,4 +547,5 @@ async def best_worker(self, location, skip_time, _enough=GOOD_ENOUGH, _limit=SPE def refresh_dict(self): while not self.extra_queue.empty(): account = self.extra_queue.get() - ACCOUNTS[account['username']] = account + username = account['username'] + ACCOUNTS[username] = account diff --git a/monocle/sanitized.py b/monocle/sanitized.py index 9f4ecfe99..05aff696e 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -20,20 +20,21 @@ _valid_types = { 'ACCOUNTS': set_sequence, 
'ACCOUNTS_CSV': path, - 'ALT_LEVEL': int, + 'ALT_PRECISION': int, 'ALT_RANGE': sequence, 'ALWAYS_NOTIFY': int, 'ALWAYS_NOTIFY_IDS': set_sequence_range, 'APP_SIMULATION': bool, 'AREA_NAME': str, 'AUTHKEY': bytes, - 'BOOTSTRAP_LEVEL': int, - 'BOUNDARIES': tuple, + 'BOOTSTRAP_RADIUS': Number, + 'BOUNDARIES': object, 'CACHE_CELLS': bool, 'CAPTCHAS_ALLOWED': int, 'CAPTCHA_KEY': str, 'COMPLETE_TUTORIAL': bool, 'COROUTINES_LIMIT': int, + 'DB': dict, 'DB_ENGINE': str, 'DIRECTORY': path, 'DISCORD_INVITE_ID': str, @@ -53,7 +54,6 @@ 'HASHTAGS': set_sequence, 'HASH_KEY': (str,) + set_sequence, 'HEATMAP': bool, - 'HOLES': tuple, 'IGNORE_IVS': bool, 'IGNORE_RARITY': bool, 'IMAGE_STATS': bool, @@ -61,7 +61,7 @@ 'INITIAL_SCORE': Number, 'ITEM_LIMITS': dict, 'IV_FONT': str, - 'LANDMARKS': sequence, + 'LANDMARKS': object, 'LANGUAGE': str, 'LAST_MIGRATION': Number, 'LOAD_CUSTOM_CSS_FILE': bool, @@ -79,12 +79,12 @@ 'MAX_RETRIES': int, 'MINIMUM_RUNTIME': Number, 'MINIMUM_SCORE': Number, + 'MORE_POINTS': bool, 'MOVE_FONT': str, - 'MULTI_BOUNDARIES': tuple, 'NAME_FONT': str, 'NEVER_NOTIFY_IDS': set_sequence_range, 'NOTIFY': bool, - 'NOTIFY_IDS': sequence, + 'NOTIFY_IDS': set_sequence_range, 'NOTIFY_RANKING': int, 'PASS': str, 'PB_API_KEY': str, @@ -92,7 +92,6 @@ 'PLAYER_LOCALE': dict, 'PROVIDER': str, 'PROXIES': set_sequence, - 'QUERY_SUFFIX': str, 'RARE_IDS': set_sequence_range, 'RARITY_OVERRIDE': dict, 'REFRESH_RATE': Number, @@ -133,14 +132,14 @@ _defaults = { 'ACCOUNTS': None, 'ACCOUNTS_CSV': None, - 'ALT_LEVEL': 13, - 'ALT_RANGE': (390.0, 490.0), + 'ALT_PRECISION': 2, + 'ALT_RANGE': (300, 400), 'ALWAYS_NOTIFY': 0, - 'ALWAYS_NOTIFY_IDS': frozenset(), + 'ALWAYS_NOTIFY_IDS': set(), 'APP_SIMULATION': True, 'AREA_NAME': 'Area', 'AUTHKEY': b'm3wtw0', - 'BOOTSTRAP_LEVEL': 16, + 'BOOTSTRAP_RADIUS': 120, 'BOUNDARIES': None, 'CACHE_CELLS': False, 'CAPTCHAS_ALLOWED': 3, @@ -157,13 +156,12 @@ 'FB_PAGE_ID': None, 'FIXED_OPACITY': False, 'FORCED_KILL': None, - 'FULL_TIME': 1800.0, - 
'GIVE_UP_KNOWN': 75.0, - 'GIVE_UP_UNKNOWN': 60.0, - 'GOOD_ENOUGH': None, + 'FULL_TIME': 1800, + 'GIVE_UP_KNOWN': 75, + 'GIVE_UP_UNKNOWN': 60, + 'GOOD_ENOUGH': 0.1, 'GOOGLE_MAPS_KEY': '', 'HASHTAGS': None, - 'HOLES': None, 'IGNORE_IVS': False, 'IGNORE_RARITY': False, 'IMAGE_STATS': False, @@ -185,11 +183,11 @@ 'MAP_WORKERS': True, 'MAX_CAPTCHAS': 0, 'MAX_RETRIES': 3, - 'MINIMUM_RUNTIME': 10.0, + 'MINIMUM_RUNTIME': 10, + 'MORE_POINTS': False, 'MOVE_FONT': 'sans-serif', - 'MULTI_BOUNDARIES': None, 'NAME_FONT': 'sans-serif', - 'NEVER_NOTIFY_IDS': frozenset(), + 'NEVER_NOTIFY_IDS': (), 'NOTIFY': False, 'NOTIFY_IDS': None, 'NOTIFY_RANKING': None, @@ -199,33 +197,32 @@ 'PLAYER_LOCALE': {'country': 'US', 'language': 'en', 'timezone': 'America/Denver'}, 'PROVIDER': None, 'PROXIES': None, - 'RARE_IDS': frozenset(), + 'RARE_IDS': (), 'RARITY_OVERRIDE': {}, - 'QUERY_SUFFIX': None, 'REFRESH_RATE': 0.6, 'REPORT_MAPS': True, 'REPORT_SINCE': None, - 'RESCAN_UNKNOWN': 90.0, + 'RESCAN_UNKNOWN': 90, 'SCAN_DELAY': 10, 'SEARCH_SLEEP': 2.5, 'SHOW_TIMER': False, 'SIMULTANEOUS_LOGINS': 2, 'SIMULTANEOUS_SIMULATION': 4, - 'SKIP_SPAWN': 90.0, + 'SKIP_SPAWN': 90, 'SMART_THROTTLE': False, 'SPAWN_ID_INT': True, - 'SPEED_LIMIT': None, + 'SPEED_LIMIT': 19.5, 'SPEED_UNIT': 'miles', - 'SPIN_COOLDOWN': 300.0, + 'SPIN_COOLDOWN': 300, 'SPIN_POKESTOPS': True, - 'STAT_REFRESH': 5.0, + 'STAT_REFRESH': 5, 'STAY_WITHIN_MAP': True, 'SWAP_OLDEST': 21600 / worker_count, 'TELEGRAM_BOT_TOKEN': None, 'TELEGRAM_CHAT_ID': None, 'TELEGRAM_USERNAME': None, - 'TIME_REQUIRED': 600.0, - 'TRASH_IDS': frozenset(), + 'TIME_REQUIRED': 300, + 'TRASH_IDS': (), 'TWEET_IMAGES': False, 'TWITTER_ACCESS_KEY': None, 'TWITTER_ACCESS_SECRET': None, @@ -237,50 +234,23 @@ 'WEBHOOKS': None } -_cast = { - 'ALWAYS_NOTIFY_IDS': set, - 'ENCOUNTER_IDS': set, - 'FULL_TIME': float, - 'GIVE_UP_KNOWN': float, - 'GIVE_UP_UNKNOWN': float, - 'GOOD_ENOUGH': float, - 'INITIAL_SCORE': float, - 'LOGIN_TIMEOUT': float, - 'MAP_FILTER_IDS': tuple, - 
'MINIMUM_RUNTIME': float, - 'MINIMUM_SCORE': float, - 'NEVER_NOTIFY_IDS': set, - 'RARE_IDS': set, - 'REFRESH_RATE': float, - 'SCAN_DELAY': float, - 'SEARCH_SLEEP': float, - 'SKIP_SPAWN': float, - 'SMART_THROTTLE': float, - 'SPEED_LIMIT': float, - 'SPIN_COOLDOWN': float, - 'STAT_REFRESH': float, - 'SWAP_OLDEST': float, - 'TIME_REQUIRED': float, - 'TRASH_IDS': set -} - class Config: __spec__ = __spec__ __slots__ = tuple(_valid_types.keys()) + ('log',) - def __init__(self, valid_types=_valid_types, defaults=_defaults, cast=_cast): + def __init__(self): self.log = getLogger('sanitizer') for key, value in (x for x in vars(config).items() if x[0].isupper()): try: - if isinstance(value, valid_types[key]): - setattr(self, key, value if key not in cast else cast[key](value)) - if key in defaults: - del defaults[key] - elif key in defaults and value is defaults[key]: - setattr(self, key, defaults.pop(key)) + if isinstance(value, _valid_types[key]): + setattr(self, key, value) + if key in _defaults: + del _defaults[key] + elif key in _defaults and value is _defaults[key]: + setattr(self, key, _defaults.pop(key)) else: - valid = valid_types[key] + valid = _valid_types[key] actual = type(value).__name__ if isinstance(valid, type): err = '{} must be {}. Yours is: {}.'.format( @@ -307,4 +277,4 @@ def __getattr__(self, name): sys.modules[__name__] = Config() -del _cast, _valid_types, config +del _valid_types, config diff --git a/monocle/spawns.py b/monocle/spawns.py index 0c5ea7c86..1500583dc 100644 --- a/monocle/spawns.py +++ b/monocle/spawns.py @@ -2,26 +2,20 @@ from collections import deque, OrderedDict from time import time +from itertools import chain +from hashlib import sha256 from . 
import bounds, db, sanitized as conf from .shared import get_logger from .utils import dump_pickle, load_pickle, get_current_hour, time_until_time -contains_spawn = bounds.contains_cellid if conf.SPAWN_ID_INT else bounds.contains_token - -class Spawns: +class BaseSpawns: """Manage spawn points and times""" - - __spec__ = __spec__ - __slots__ = ('known', 'despawn_times', 'unknown', 'log') - def __init__(self): ## Spawns with known times - # {spawn_id: spawn_seconds} + # {(lat, lon): (spawn_id, spawn_seconds)} self.known = OrderedDict() - - # points may not be in bounds, but are visible from within bounds # {spawn_id: despawn_seconds} self.despawn_times = {} @@ -29,6 +23,8 @@ def __init__(self): # {(lat, lon)} self.unknown = set() + self.class_version = 3 + self.db_hash = sha256(conf.DB_ENGINE.encode()).digest() self.log = get_logger('spawns') def __len__(self): @@ -37,56 +33,64 @@ def __len__(self): def __bool__(self): return len(self.despawn_times) > 0 - def items(self): - return self.known.items() - - def add_known(self, spawn_id, despawn_time): - self.despawn_times[spawn_id] = despawn_time - self.unknown.discard(spawn_id) + def update(self): + bound = bool(bounds) + last_migration = conf.LAST_MIGRATION - def update(self, _migration=conf.LAST_MIGRATION, _contains=contains_spawn): - known = {} with db.session_scope() as session: - query = session.query(db.Spawnpoint.spawn_id, db.Spawnpoint.despawn_time, db.Spawnpoint.duration, db.Spawnpoint.updated) - for spawn_id, despawn_time, duration, updated in query: + query = session.query(db.Spawnpoint) + if bound or conf.STAY_WITHIN_MAP: + query = query.filter(db.Spawnpoint.lat >= bounds.south, + db.Spawnpoint.lat <= bounds.north, + db.Spawnpoint.lon >= bounds.west, + db.Spawnpoint.lon <= bounds.east) + known = {} + for spawn in query: + point = spawn.lat, spawn.lon + # skip if point is not within boundaries (if applicable) - if not _contains(spawn_id): + if bound and point not in bounds: continue - if not updated or 
updated < _migration: - self.unknown.add(spawn_id) + if not spawn.updated or spawn.updated <= last_migration: + self.unknown.add(point) continue - self.despawn_times[spawn_id] = despawn_time + if spawn.duration == 60: + spawn_time = spawn.despawn_time + else: + spawn_time = (spawn.despawn_time + 1800) % 3600 - known[spawn_id] = despawn_time if duration == 60 else (despawn_time + 1800) % 3600 - if known: - self.known = OrderedDict(sorted(known.items(), key=lambda k: k[1])) + self.despawn_times[spawn.spawn_id] = spawn.despawn_time + known[point] = spawn.spawn_id, spawn_time + self.known = OrderedDict(sorted(known.items(), key=lambda k: k[1][1])) def after_last(self): try: k = next(reversed(self.known)) - return time() % 3600 > self.known[k] + seconds = self.known[k][1] + return time() % 3600 > seconds except (StopIteration, KeyError, TypeError): return False def get_despawn_time(self, spawn_id, seen): + hour = get_current_hour(now=seen) try: - despawn_time = self.despawn_times[spawn_id] + get_current_hour(now=seen) - return despawn_time if seen < despawn_time else despawn_time + 3600 + despawn_time = self.despawn_times[spawn_id] + hour + if seen > despawn_time: + despawn_time += 3600 + return despawn_time except KeyError: return None def unpickle(self): try: state = load_pickle('spawns', raise_exception=True) - if (state['class_version'] == 4 - and state['db_hash'] == db.DB_HASH - and state['bounds_hash'] == hash(bounds) - and state['last_migration'] == conf.LAST_MIGRATION): - self.despawn_times = state['despawn_times'] - self.known = state['known'] - self.unknown = state['unknown'] + if all((state['class_version'] == self.class_version, + state['db_hash'] == self.db_hash, + state['bounds_hash'] == hash(bounds), + state['last_migration'] == conf.LAST_MIGRATION)): + self.__dict__.update(state) return True else: self.log.warning('Configuration changed, reloading spawns from DB.') @@ -97,18 +101,79 @@ def unpickle(self): return False def pickle(self): - 
dump_pickle('spawns', { - 'bounds_hash': hash(bounds), - 'class_version': 4, - 'db_hash': db.DB_HASH, - 'despawn_times': self.despawn_times, - 'known': self.known, - 'last_migration': conf.LAST_MIGRATION, - 'unknown': self.unknown}) + state = self.__dict__.copy() + del state['log'] + state.pop('cells_count', None) + state['bounds_hash'] = hash(bounds) + state['last_migration'] = conf.LAST_MIGRATION + dump_pickle('spawns', state) @property def total_length(self): - return len(self.despawn_times) + len(self.unknown) + return len(self.despawn_times) + len(self.unknown) + self.cells_count + + +class Spawns(BaseSpawns): + def __init__(self): + super().__init__() + self.cells_count = 0 + + def items(self): + return self.known.items() + + def add_known(self, spawn_id, despawn_time, point): + self.despawn_times[spawn_id] = despawn_time + self.unknown.discard(point) + + def add_unknown(self, point): + self.unknown.add(point) + def unpickle(self): + result = super().unpickle() + try: + del self.cell_points + except AttributeError: + pass + return result + + def mystery_gen(self): + for mystery in self.unknown.copy(): + yield mystery + + +class MoreSpawns(BaseSpawns): + def __init__(self): + super().__init__() + + ## Coordinates mentioned as "spawn_points" in GetMapObjects response + ## May or may not be actual spawn points, more research is needed. 
+ # {(lat, lon)} + self.cell_points = set() + + def items(self): + # return a copy since it may be modified + return self.known.copy().items() + + def add_known(self, spawn_id, despawn_time, point): + self.despawn_times[spawn_id] = despawn_time + # add so that have_point() will be up to date + self.known[point] = None + self.unknown.discard(point) + self.cell_points.discard(point) + + def add_unknown(self, point): + self.unknown.add(point) + self.cell_points.discard(point) + + def have_point(self, point): + return point in chain(self.cell_points, self.known, self.unknown) + + def mystery_gen(self): + for mystery in chain(self.unknown.copy(), self.cell_points.copy()): + yield mystery + + @property + def cells_count(self): + return len(self.cell_points) -sys.modules[__name__] = Spawns() +sys.modules[__name__] = MoreSpawns() if conf.MORE_POINTS else Spawns() diff --git a/monocle/static/js/main.js b/monocle/static/js/main.js index 95b281ffb..b83c147cd 100644 --- a/monocle/static/js/main.js +++ b/monocle/static/js/main.js @@ -4,337 +4,350 @@ var _WorkerIconUrl = 'static/monocle-icons/assets/ball.png'; var _PokestopIconUrl = 'static/monocle-icons/assets/stop.png'; var PokemonIcon = L.Icon.extend({ - options: {popupAnchor: [0, -15]}, - createIcon: function() { - var div = document.createElement('div'); - div.innerHTML = '
' + - '
' + - '' + - '
' + - '
' + - calculateRemainingTime(this.options.expire) + '
' + - '
'; - return div; - } + options: { + popupAnchor: [0, -15] + }, + createIcon: function() { + var div = document.createElement('div'); + div.innerHTML = + '
' + + '
' + + '' + + '
' + + '
' + calculateRemainingTime(this.options.expires_at) + '
' + + '
'; + return div; + } }); -var FortIcon = - L.Icon.extend({options: {iconSize: [20, 20], popupAnchor: [0, -10], className: 'fort-icon'}}); -var WorkerIcon = L.Icon.extend( - {options: {iconSize: [20, 20], className: 'worker-icon', iconUrl: _WorkerIconUrl}}); -var PokestopIcon = L.Icon.extend( - {options: {iconSize: [10, 20], className: 'pokestop-icon', iconUrl: _PokestopIconUrl}}); +var FortIcon = L.Icon.extend({ + options: { + iconSize: [20, 20], + popupAnchor: [0, -10], + className: 'fort-icon' + } +}); +var WorkerIcon = L.Icon.extend({ + options: { + iconSize: [20, 20], + className: 'worker-icon', + iconUrl: _WorkerIconUrl + } +}); +var PokestopIcon = L.Icon.extend({ + options: { + iconSize: [10, 20], + className: 'pokestop-icon', + iconUrl: _PokestopIconUrl + } +}); var markers = {}; var overlays = { - Pokemon: L.layerGroup([]), - Trash: L.layerGroup([]), - Gyms: L.layerGroup([]), - Pokestops: L.layerGroup([]), - Workers: L.layerGroup([]), - Spawns: L.layerGroup([]), - ScanArea: L.layerGroup([]) + Pokemon: L.layerGroup([]), + Trash: L.layerGroup([]), + Gyms: L.layerGroup([]), + Pokestops: L.layerGroup([]), + Workers: L.layerGroup([]), + Spawns: L.layerGroup([]), + ScanArea: L.layerGroup([]) }; -function unsetHidden(event) { - event.target.hidden = false; +function unsetHidden (event) { + event.target.hidden = false; } -function setHidden(event) { - event.target.hidden = true; +function setHidden (event) { + event.target.hidden = true; } -function monitor(group, initial) { - group.hidden = initial; - group.on('add', unsetHidden); - group.on('remove', setHidden); +function monitor (group, initial) { + group.hidden = initial; + group.on('add', unsetHidden); + group.on('remove', setHidden); } -monitor(overlays.Pokemon, false); -monitor(overlays.Trash, true); -monitor(overlays.Gyms, true); -monitor(overlays.Workers, false); - -function getPopupContent(item) { - var diff = (item.expire - new Date().getTime() / 1000); - var minutes = Math.floor(diff / 60); - var 
seconds = Math.floor(diff % 60); - var expire = minutes + 'm ' + seconds + 's'; - var content = '' + item.name + ' - #' + item.pid + ''; - if (item.atk != undefined) { - var totaliv = 100 * (item.atk + item.def + item.sta) / 45; - content += ' - ' + totaliv.toFixed(2) + '%
'; - content += 'Disappears in: ' + expire + '
'; - content += 'Move 1: ' + item.move1 + ' ( ' + item.damage1 + ' dps )
'; - content += 'Move 2: ' + item.move2 + ' ( ' + item.damage2 + ' dps )
'; - content += 'IV: ' + item.atk + ' atk, ' + item.def + ' def, ' + item.sta + ' sta
'; - } else { - content += '
Disappears in: ' + expire + '
'; - } - content += 'Hide'; - content += '  |  '; - - var userPref = getPreference('filter-' + item.pid); - if (userPref == 'trash') { - content += 'Move to Pokemon'; - } else { - content += 'Move to Trash'; - } - content += '
=> Get directions'; - return content; +monitor(overlays.Pokemon, false) +monitor(overlays.Trash, true) +monitor(overlays.Gyms, true) +monitor(overlays.Workers, false) + +function getPopupContent (item) { + var diff = (item.expires_at - new Date().getTime() / 1000); + var minutes = parseInt(diff / 60); + var seconds = parseInt(diff - (minutes * 60)); + var expires_at = minutes + 'm ' + seconds + 's'; + var content = '' + item.name + ' - #' + item.pokemon_id + ''; + if(item.atk != undefined){ + var totaliv = 100 * (item.atk + item.def + item.sta) / 45; + content += ' - ' + totaliv.toFixed(2) + '%
'; + content += 'Disappears in: ' + expires_at + '
'; + content += 'Move 1: ' + item.move1 + ' ( ' + item.damage1 + ' dps )
'; + content += 'Move 2: ' + item.move2 + ' ( ' + item.damage2 + ' dps )
'; + content += 'IV: ' + item.atk + ' atk, ' + item.def + ' def, ' + item.sta + ' sta
' + } else { + content += '
Disappears in: ' + expires_at + '
'; + } + content += 'Hide'; + content += '  |  '; + + var userPref = getPreference('filter-'+item.pokemon_id); + if (userPref == 'trash'){ + content += 'Move to Pokemon'; + }else{ + content += 'Move to Trash'; + } + content += '
=> Get directions'; + return content; } -function getOpacity(diff) { - if (diff > 300 || getPreference('FIXED_OPACITY') === '1') { - return 1; - } - return 0.5 + diff / 600; +function getOpacity (diff) { + if (diff > 300 || getPreference('FIXED_OPACITY') === "1") { + return 1; + } + return 0.5 + diff / 600; } -function PokemonMarker(raw) { - var icon = new PokemonIcon( - {iconUrl: '/static/monocle-icons/icons/' + raw.pid + '.png', expire: raw.expire}); - var marker = L.marker([raw.lat, raw.lon], {icon: icon, opacity: 1}); - - if (_last_pokemon_id < raw.id) { - _last_pokemon_id = raw.id; - } - - if (raw.trash) { - marker.overlay = 'Trash'; - } else { - marker.overlay = 'Pokemon'; - } - var userPreference = getPreference('filter-' + raw.pid); - if (userPreference === 'pokemon') { - marker.overlay = 'Pokemon'; - } else if (userPreference === 'trash') { - marker.overlay = 'Trash'; - } else if (userPreference === 'hidden') { - marker.overlay = 'Hidden'; - } - marker.raw = raw; - markers[raw.id] = marker; - marker.on('popupopen', function popupopen(event) { - event.popup.options.autoPan = true; // Pan into view once - event.popup.setContent(getPopupContent(event.target.raw)); - event.target.popupInterval = setInterval(function() { - event.popup.setContent(getPopupContent(event.target.raw)); - event.popup.options.autoPan = false; // Don't fight user panning - }, 1000); - }); - marker.on('popupclose', function(event) { - clearInterval(event.target.popupInterval); - }); - marker.setOpacity(getOpacity(marker.raw)); - marker.opacityInterval = setInterval(function() { - if (marker.overlay === 'Hidden' || overlays[marker.overlay].hidden) { - return; +function PokemonMarker (raw) { + var icon = new PokemonIcon({iconUrl: '/static/monocle-icons/icons/' + raw.pokemon_id + '.png', expires_at: raw.expires_at}); + var marker = L.marker([raw.lat, raw.lon], {icon: icon, opacity: 1}); + + var intId = parseInt(raw.id.split('-')[1]); + if (_last_pokemon_id < intId){ + _last_pokemon_id = 
intId; } - var diff = marker.raw.expire - new Date().getTime() / 1000; - if (diff > 0) { - marker.setOpacity(getOpacity(diff)); + + if (raw.trash) { + marker.overlay = 'Trash'; } else { - marker.removeFrom(overlays[marker.overlay]); - markers[marker.raw.id] = undefined; - clearInterval(marker.opacityInterval); + marker.overlay = 'Pokemon'; } - }, 2500); - marker.bindPopup(); - return marker; + var userPreference = getPreference('filter-'+raw.pokemon_id); + if (userPreference === 'pokemon'){ + marker.overlay = 'Pokemon'; + }else if (userPreference === 'trash'){ + marker.overlay = 'Trash'; + }else if (userPreference === 'hidden'){ + marker.overlay = 'Hidden'; + } + marker.raw = raw; + markers[raw.id] = marker; + marker.on('popupopen',function popupopen (event) { + event.popup.options.autoPan = true; // Pan into view once + event.popup.setContent(getPopupContent(event.target.raw)); + event.target.popupInterval = setInterval(function () { + event.popup.setContent(getPopupContent(event.target.raw)); + event.popup.options.autoPan = false; // Don't fight user panning + }, 1000); + }); + marker.on('popupclose', function (event) { + clearInterval(event.target.popupInterval); + }); + marker.setOpacity(getOpacity(marker.raw)); + marker.opacityInterval = setInterval(function () { + if (marker.overlay === "Hidden" || overlays[marker.overlay].hidden) { + return; + } + var diff = marker.raw.expires_at - new Date().getTime() / 1000; + if (diff > 0) { + marker.setOpacity(getOpacity(diff)); + } else { + marker.removeFrom(overlays[marker.overlay]); + markers[marker.raw.id] = undefined; + clearInterval(marker.opacityInterval); + } + }, 2500); + marker.bindPopup(); + return marker; } -function FortMarker(raw) { - var icon = new FortIcon({iconUrl: '/static/monocle-icons/forts/' + raw.team + '.png'}); - var marker = L.marker([raw.lat, raw.lon], {icon: icon, opacity: 1}); - marker.raw = raw; - markers[raw.id] = marker; - marker.on('popupopen', function popupopen(event) { - var content = 
''; - if (raw.team === 0) { - content = 'An empty Gym!'; - } else { - if (raw.team === 1) { - content = 'Team Mystic'; - } else if (raw.team === 2) { - content = 'Team Valor'; - } else if (raw.team === 3) { - content = 'Team Instinct'; - } - content += '
Prestige: ' + raw.prestige + '
Guarding Pokemon: ' + raw.pokemon_name + - ' (#' + raw.pid + ')'; - } - content += '
=> Get directions'; - event.popup.setContent(content); - }); - marker.bindPopup(); - return marker; +function FortMarker (raw) { + var icon = new FortIcon({iconUrl: '/static/monocle-icons/forts/' + raw.team + '.png'}); + var marker = L.marker([raw.lat, raw.lon], {icon: icon, opacity: 1}); + marker.raw = raw; + markers[raw.id] = marker; + marker.on('popupopen',function popupopen (event) { + var content = '' + if (raw.team === 0) { + content = 'An empty Gym!' + } + else { + if (raw.team === 1 ) { + content = 'Team Mystic' + } + else if (raw.team === 2 ) { + content = 'Team Valor' + } + else if (raw.team === 3 ) { + content = 'Team Instinct' + } + content += '
Prestige: ' + raw.prestige + + '
Guarding Pokemon: ' + raw.pokemon_name + ' (#' + raw.pokemon_id + ')'; + } + content += '
=> Get directions'; + event.popup.setContent(content); + }); + marker.bindPopup(); + return marker; } -function WorkerMarker(raw) { - var icon = new WorkerIcon(); - var marker = L.marker([raw.lat, raw.lon], {icon: icon}); - var circle = L.circle([raw.lat, raw.lon], 70, {weight: 2}); - var group = L.featureGroup([marker, circle]) - .bindPopup( - 'Worker ' + raw.worker_no + '
time: ' + raw.time + - '
speed: ' + raw.speed + '
total seen: ' + raw.total_seen + - '
visits: ' + raw.visits + '
seen here: ' + raw.seen_here); - return group; +function WorkerMarker (raw) { + var icon = new WorkerIcon(); + var marker = L.marker([raw.lat, raw.lon], {icon: icon}); + var circle = L.circle([raw.lat, raw.lon], 70, {weight: 2}); + var group = L.featureGroup([marker, circle]) + .bindPopup('Worker ' + raw.worker_no + '
time: ' + raw.time + '
speed: ' + raw.speed + '
total seen: ' + raw.total_seen + '
visits: ' + raw.visits + '
seen here: ' + raw.seen_here); + return group; } -function addPokemonToMap(data) { - data.forEach(function(item) { - // Already placed? No need to do anything, then - if (item.id in markers) { - return; - } - var marker = PokemonMarker(item); - if (marker.overlay !== 'Hidden') { - marker.addTo(overlays[marker.overlay]); +function addPokemonToMap (data, map) { + data.forEach(function (item) { + // Already placed? No need to do anything, then + if (item.id in markers) { + return; + } + var marker = PokemonMarker(item); + if (marker.overlay !== "Hidden"){ + marker.addTo(overlays[marker.overlay]) + } + }); + updateTime(); + if (_updateTimeInterval === null){ + _updateTimeInterval = setInterval(updateTime, 1000); } - }); - updateTime(); - if (_updateTimeInterval === null) { - _updateTimeInterval = setInterval(updateTime, 1000); - } } -function addGymsToMap(data) { - data.forEach(function(item) { - // No change since last time? Then don't do anything - var existing = markers[item.id]; - if (typeof existing !== 'undefined') { - if (existing.raw.sighting_id === item.sighting_id) { - return; - } - existing.removeFrom(overlays.Gyms); - markers[item.id] = undefined; - } - marker = FortMarker(item); - marker.addTo(overlays.Gyms); - }); +function addGymsToMap (data, map) { + data.forEach(function (item) { + // No change since last time? Then don't do anything + var existing = markers[item.id]; + if (typeof existing !== 'undefined') { + if (existing.raw.sighting_id === item.sighting_id) { + return; + } + existing.removeFrom(overlays.Gyms); + markers[item.id] = undefined; + } + marker = FortMarker(item); + marker.addTo(overlays.Gyms); + }); } -function addSpawnsToMap(data) { - data.forEach(function(item) { - var circle = L.circle([item.lat, item.lon], 5, {weight: 2}); - if (item.despawn_time === "?") { - circle.setStyle({color: '#f03'}); - } - circle.bindPopup( - 'Spawn ' + item.spawn_id + '' + - '
despawn: ' + item.despawn_time + - '
duration: ' + item.duration + 'm' + - '
=> Get directions'); - circle.addTo(overlays.Spawns); - }); +function addSpawnsToMap (data, map) { + data.forEach(function (item) { + var circle = L.circle([item.lat, item.lon], 5, {weight: 2}); + var time = '??'; + if (item.despawn_time != null) { + time = '' + Math.floor(item.despawn_time/60) + 'min ' + + (item.despawn_time%60) + 'sec'; + } + else { + circle.setStyle({color: '#f03'}) + } + circle.bindPopup('Spawn ' + item.spawn_id + '' + + '
despawn: ' + time + + '
duration: '+ (item.duration == null ? '30mn' : item.duration + 'mn') + + '
=> Get directions'); + circle.addTo(overlays.Spawns); + }); } -function addPokestopsToMap(data) { - data.forEach(function(item) { - var icon = new PokestopIcon(); - var marker = L.marker([item[1], item[2]], {icon: icon}); - marker.raw = item; - marker.bindPopup( - 'Pokestop: ' + item[0] + '' + - '
=> Get directions'); - marker.addTo(overlays.Pokestops); - }); +function addPokestopsToMap (data, map) { + data.forEach(function (item) { + var icon = new PokestopIcon(); + var marker = L.marker([item.lat, item.lon], {icon: icon}); + marker.raw = item; + marker.bindPopup('Pokestop: ' + item.external_id + '' + + '
=> Get directions'); + marker.addTo(overlays.Pokestops); + }); } -function addScanAreaToMap(data) { - data.areas.forEach(function(item) { - L.polyline(item).addTo(overlays.ScanArea); - }); - data.holes.forEach(function(item) { - L.polyline(item, {'color': 'red'}).addTo(overlays.ScanArea); - }); +function addScanAreaToMap (data, map) { + data.forEach(function (item) { + if (item.type === 'scanarea'){ + L.polyline(item.coords).addTo(overlays.ScanArea); + } else if (item.type === 'scanblacklist'){ + L.polyline(item.coords, {'color':'red'}).addTo(overlays.ScanArea); + } + }); } -function addWorkersToMap(data) { - overlays.Workers.clearLayers(); - data.forEach(function(item) { - marker = WorkerMarker(item); - marker.addTo(overlays.Workers); - }); +function addWorkersToMap (data, map) { + overlays.Workers.clearLayers() + data.forEach(function (item) { + marker = WorkerMarker(item); + marker.addTo(overlays.Workers); + }); } -function getPokemon() { - if (overlays.Pokemon.hidden && overlays.Trash.hidden) { - return; - } - new Promise(function(resolve, reject) { - $.get('/data?last_id=' + _last_pokemon_id, function(response) { - resolve(response); +function getPokemon () { + if (overlays.Pokemon.hidden && overlays.Trash.hidden) { + return; + } + new Promise(function (resolve, reject) { + $.get('/data?last_id='+_last_pokemon_id, function (response) { + resolve(response); + }); + }).then(function (data) { + addPokemonToMap(data, map); }); - }).then(function(data) { - addPokemonToMap(data); - }); } -function getGyms() { - if (overlays.Gyms.hidden) { - return; - } - new Promise(function(resolve, reject) { - $.get('/gym_data', function(response) { - resolve(response); +function getGyms () { + if (overlays.Gyms.hidden) { + return; + } + new Promise(function (resolve, reject) { + $.get('/gym_data', function (response) { + resolve(response); + }); + }).then(function (data) { + addGymsToMap(data, map); }); - }).then(function(data) { - addGymsToMap(data); - }); } function 
getSpawnPoints() { - new Promise(function(resolve, reject) { - $.get('/spawnpoints', function(response) { - resolve(response); + new Promise(function (resolve, reject) { + $.get('/spawnpoints', function (response) { + resolve(response); + }); + }).then(function (data) { + addSpawnsToMap(data, map); }); - }).then(function(data) { - addSpawnsToMap(data); - }); } function getPokestops() { - new Promise(function(resolve, reject) { - $.get('/pokestops', function(response) { - resolve(response); + new Promise(function (resolve, reject) { + $.get('/pokestops', function (response) { + resolve(response); + }); + }).then(function (data) { + addPokestopsToMap(data, map); }); - }).then(function(data) { - addPokestopsToMap(data); - }); } function getScanAreaCoords() { - new Promise(function(resolve, reject) { - $.get('/scan_coords', function(response) { - resolve(response); + new Promise(function (resolve, reject) { + $.get('/scan_coords', function (response) { + resolve(response); + }); + }).then(function (data) { + addScanAreaToMap(data, map); }); - }).then(function(data) { - addScanAreaToMap(data); - }); } function getWorkers() { - if (overlays.Workers.hidden) { - return; - } - new Promise(function(resolve, reject) { - $.get('/workers_data', function(response) { - resolve(response); + if (overlays.Workers.hidden) { + return; + } + new Promise(function (resolve, reject) { + $.get('/workers_data', function (response) { + resolve(response); + }); + }).then(function (data) { + addWorkersToMap(data, map); }); - }).then(function(data) { - addWorkersToMap(data); - }); } var map = L.map('main-map', {preferCanvas: true}).setView(_MapCoords, 13); @@ -343,198 +356,196 @@ overlays.Pokemon.addTo(map); overlays.ScanArea.addTo(map); var control = L.control.layers(null, overlays).addTo(map); -L.tileLayer(_MapProviderUrl, {opacity: 0.75, attribution: _MapProviderAttribution}).addTo(map); -map.whenReady(function() { - $('.my-location').on('click', function() { - 
map.locate({enableHighAccurracy: true, setView: true}); - }); - overlays.Gyms.once('add', function(e) { - getGyms(); - }); - overlays.Spawns.once('add', function(e) { - getSpawnPoints(); - }); - overlays.Pokestops.once('add', function(e) { - getPokestops(); - }); - getScanAreaCoords(); - getWorkers(); - overlays.Workers.hidden = true; - setInterval(getWorkers, 14000); - getPokemon(); - setInterval(getPokemon, 30000); - setInterval(getGyms, 110000); +L.tileLayer(_MapProviderUrl, { + opacity: 0.75, + attribution: _MapProviderAttribution +}).addTo(map); +map.whenReady(function () { + $('.my-location').on('click', function () { + map.locate({ enableHighAccurracy: true, setView: true }); + }); + overlays.Gyms.once('add', function(e) { + getGyms(); + }) + overlays.Spawns.once('add', function(e) { + getSpawnPoints(); + }) + overlays.Pokestops.once('add', function(e) { + getPokestops(); + }) + getScanAreaCoords(); + getWorkers(); + overlays.Workers.hidden = true; + setInterval(getWorkers, 14000); + getPokemon(); + setInterval(getPokemon, 30000); + setInterval(getGyms, 110000) }); -$('#settings>ul.nav>li>a').on('click', function() { - // Click handler for each tab button. - $(this).parent().parent().children('li').removeClass('active'); - $(this).parent().addClass('active'); - var panel = $(this).data('panel'); - var item = $('#settings>.settings-panel') - .removeClass('active') - .filter('[data-panel=\'' + panel + '\']') - .addClass('active'); +$("#settings>ul.nav>li>a").on('click', function(){ + // Click handler for each tab button. 
+ $(this).parent().parent().children("li").removeClass('active'); + $(this).parent().addClass('active'); + var panel = $(this).data('panel'); + var item = $("#settings>.settings-panel").removeClass('active') + .filter("[data-panel='"+panel+"']").addClass('active'); }); -$('#settings_close_btn').on('click', function() { - // 'X' button on Settings panel - $('#settings').animate({opacity: 0}, 250, function() { - $(this).hide(); - }); +$("#settings_close_btn").on('click', function(){ + // 'X' button on Settings panel + $("#settings").animate({ + opacity: 0 + }, 250, function(){ $(this).hide(); }); }); -$('.my-settings').on('click', function() { - // Settings button on bottom-left corner - $('#settings').show().animate({opacity: 1}, 250); +$('.my-settings').on('click', function () { + // Settings button on bottom-left corner + $("#settings").show().animate({ + opacity: 1 + }, 250); }); -$('#reset_btn').on('click', function() { - // Reset button in Settings>More - if (confirm('This will reset all your preferences. Are you sure?')) { - localStorage.clear(); - location.reload(); - } +$('#reset_btn').on('click', function () { + // Reset button in Settings>More + if (confirm("This will reset all your preferences. 
Are you sure?")){ + localStorage.clear(); + location.reload(); + } }); -$('body').on('click', '.popup_filter_link', function() { - var id = $(this).data('pokeid'); - var layer = $(this).data('newlayer').toLowerCase(); - moveToLayer(id, layer); - var item = $('#settings button[data-id=\'' + id + '\']'); - item.removeClass('active').filter('[data-value=\'' + layer + '\']').addClass('active'); +$('body').on('click', '.popup_filter_link', function () { + var id = $(this).data("pokeid"); + var layer = $(this).data("newlayer").toLowerCase(); + moveToLayer(id, layer); + var item = $("#settings button[data-id='"+id+"']"); + item.removeClass("active").filter("[data-value='"+layer+"']").addClass("active"); }); -$('#settings').on('click', '.settings-panel button', function() { - // Handler for each button in every settings-panel. - var item = $(this); - if (item.hasClass('active')) { - return; - } - var id = item.data('id'); - var key = item.parent().data('group'); - var value = item.data('value'); - - item.parent().children('button').removeClass('active'); - item.addClass('active'); - - if (key.indexOf('filter-') > -1) { - // This is a pokemon's filter button - moveToLayer(id, value); - } else { - setPreference(key, value); - } +$('#settings').on('click', '.settings-panel button', function () { + //Handler for each button in every settings-panel. 
+ var item = $(this); + if (item.hasClass('active')){ + return; + } + var id = item.data('id'); + var key = item.parent().data('group'); + var value = item.data('value'); + + item.parent().children("button").removeClass("active"); + item.addClass("active"); + + if (key.indexOf('filter-') > -1){ + // This is a pokemon's filter button + moveToLayer(id, value); + }else{ + setPreference(key, value); + } }); -function moveToLayer(id, layer) { - setPreference('filter-' + id, layer); - layer = layer.toLowerCase(); - for (var k in markers) { - var m = markers[k]; - if ((m !== undefined) && (m.raw.pid === id)) { - m.removeFrom(overlays[m.overlay]); - if (layer === 'pokemon') { - m.overlay = 'Pokemon'; - m.addTo(overlays.Pokemon); - } else if (layer === 'trash') { - m.overlay = 'Trash'; - m.addTo(overlays.Trash); - } +function moveToLayer(id, layer){ + setPreference("filter-"+id, layer); + layer = layer.toLowerCase(); + for(var k in markers) { + var m = markers[k]; + if ((k.indexOf("pokemon-") > -1) && (m !== undefined) && (m.raw.pokemon_id === id)){ + m.removeFrom(overlays[m.overlay]); + if (layer === 'pokemon'){ + m.overlay = "Pokemon"; + m.addTo(overlays.Pokemon); + }else if (layer === 'trash') { + m.overlay = "Trash"; + m.addTo(overlays.Trash); + } + } } - } } -function populateSettingsPanels() { - var container = $('.settings-panel[data-panel="filters"]').children('.panel-body'); - var newHtml = ''; - for (var i = 1; i <= _pokemon_count; i++) { - var partHtml = `
- -
- - - +function populateSettingsPanels(){ + var container = $('.settings-panel[data-panel="filters"]').children('.panel-body'); + var newHtml = ''; + for (var i = 1; i <= _pokemon_count; i++){ + var partHtml = `
+ +
+ + +
`; - newHtml += partHtml - } - newHtml += '
'; - container.html(newHtml); + newHtml += partHtml + } + newHtml += '
'; + container.html(newHtml); } -function setSettingsDefaults() { - for (var i = 1; i <= _pokemon_count; i++) { - _defaultSettings['filter-' + i] = - (_defaultSettings['TRASH_IDS'].indexOf(i) > -1) ? 'trash' : 'pokemon'; - }; - - $('#settings div.btn-group').each(function() { - var item = $(this); - var key = item.data('group'); - var value = getPreference(key); - if (value === false) - value = '0'; - else if (value === true) - value = '1'; - item.children('button') - .removeClass('active') - .filter('[data-value=\'' + value + '\']') - .addClass('active'); - }); +function setSettingsDefaults(){ + for (var i = 1; i <= _pokemon_count; i++){ + _defaultSettings['filter-'+i] = (_defaultSettings['TRASH_IDS'].indexOf(i) > -1) ? "trash" : "pokemon"; + }; + + $("#settings div.btn-group").each(function(){ + var item = $(this); + var key = item.data('group'); + var value = getPreference(key); + if (value === false) + value = "0"; + else if (value === true) + value = "1"; + item.children("button").removeClass("active").filter("[data-value='"+value+"']").addClass("active"); + }); } populateSettingsPanels(); setSettingsDefaults(); -function getPreference(key, ret) { - return localStorage.getItem(key) ? localStorage.getItem(key) : - (key in _defaultSettings ? _defaultSettings[key] : ret); +function getPreference(key, ret){ + return localStorage.getItem(key) ? localStorage.getItem(key) : (key in _defaultSettings ? 
_defaultSettings[key] : ret); } -function setPreference(key, val) { - localStorage.setItem(key, val); +function setPreference(key, val){ + localStorage.setItem(key, val); } -$(window).scroll(function() { - if ($(this).scrollTop() > 100) { - $('.scroll-up').fadeIn(); - } else { - $('.scroll-up').fadeOut(); - } +$(window).scroll(function () { + if ($(this).scrollTop() > 100) { + $('.scroll-up').fadeIn(); + } else { + $('.scroll-up').fadeOut(); + } }); -$('#settings').scroll(function() { - if ($(this).scrollTop() > 100) { - $('.scroll-up').fadeIn(); - } else { - $('.scroll-up').fadeOut(); - } +$("#settings").scroll(function () { + if ($(this).scrollTop() > 100) { + $('.scroll-up').fadeIn(); + } else { + $('.scroll-up').fadeOut(); + } }); -$('.scroll-up').click(function() { - $('html, body, #settings').animate({scrollTop: 0}, 500); - return false; +$('.scroll-up').click(function () { + $("html, body, #settings").animate({ + scrollTop: 0 + }, 500); + return false; }); function calculateRemainingTime(expire_at_timestamp) { var diff = (expire_at_timestamp - new Date().getTime() / 1000); - var minutes = Math.floor(diff / 60); - var seconds = Math.floor(diff % 60); - return minutes + ':' + (seconds > 9 ? '' + seconds : '0' + seconds); + var minutes = parseInt(diff / 60); + var seconds = parseInt(diff - (minutes * 60)); + return minutes + ':' + (seconds > 9 ? 
"" + seconds: "0" + seconds); } function updateTime() { - if (getPreference('SHOW_TIMER') === '1') { - $('.remaining_text').each(function() { - $(this).css('visibility', 'visible'); - this.innerHTML = calculateRemainingTime($(this).data('expire')); - }); - } else { - $('.remaining_text').each(function() { - $(this).css('visibility', 'hidden'); - }); - } + if (getPreference("SHOW_TIMER") === "1"){ + $(".remaining_text").each(function() { + $(this).css('visibility', 'visible'); + this.innerHTML = calculateRemainingTime($(this).data('expire')); + }); + }else{ + $(".remaining_text").each(function() { + $(this).css('visibility', 'hidden'); + }); + } } diff --git a/monocle/utils.py b/monocle/utils.py index 4edeb4ee1..fd5dcb1d1 100644 --- a/monocle/utils.py +++ b/monocle/utils.py @@ -3,19 +3,46 @@ from os import mkdir from os.path import join, exists from sys import platform +from asyncio import sleep +from math import sqrt from uuid import uuid4 +from enum import Enum from csv import DictReader -from cyrandom import choice +from cyrandom import choice, shuffle, uniform from time import time from pickle import dump as pickle_dump, load as pickle_load, HIGHEST_PROTOCOL -from pogeo import Location +from geopy import Point +from geopy.distance import distance from aiopogo import utilities as pgoapi_utils from pogeo import get_distance from . 
import bounds, sanitized as conf +IPHONES = {'iPhone5,1': 'N41AP', + 'iPhone5,2': 'N42AP', + 'iPhone5,3': 'N48AP', + 'iPhone5,4': 'N49AP', + 'iPhone6,1': 'N51AP', + 'iPhone6,2': 'N53AP', + 'iPhone7,1': 'N56AP', + 'iPhone7,2': 'N61AP', + 'iPhone8,1': 'N71AP', + 'iPhone8,2': 'N66AP', + 'iPhone8,4': 'N69AP', + 'iPhone9,1': 'D10AP', + 'iPhone9,2': 'D11AP', + 'iPhone9,3': 'D101AP', + 'iPhone9,4': 'D111AP'} + + +class Units(Enum): + miles = 1 + kilometers = 2 + meters = 3 + + def best_factors(n): return next(((i, n//i) for i in range(int(n**0.5), 0, -1) if n % i == 0)) @@ -34,15 +61,69 @@ def percentage_split(seq, percentages): def get_start_coords(worker_no, grid=conf.GRID, bounds=bounds): """Returns center of square for given worker""" - per_column = (grid[0] * grid[1]) // grid[0] + per_column = int((grid[0] * grid[1]) / grid[0]) column = worker_no % per_column - row = worker_no // per_column + row = int(worker_no / per_column) part_lat = (bounds.south - bounds.north) / grid[0] part_lon = (bounds.east - bounds.west) / grid[1] start_lat = bounds.north + part_lat * row + part_lat / 2 start_lon = bounds.west + part_lon * column + part_lon / 2 - return Location(start_lat, start_lon) + return start_lat, start_lon + + +def float_range(start, end, step): + """range for floats, also capable of iterating backwards""" + if start > end: + while end <= start: + yield start + start += -step + else: + while start <= end: + yield start + start += step + + +def get_gains(dist=70): + """Returns lat and lon gain + + Gain is space between circles. 
+ """ + start = Point(*bounds.center) + base = dist * sqrt(3) + height = base * sqrt(3) / 2 + dis_a = distance(meters=base) + dis_h = distance(meters=height) + lon_gain = dis_a.destination(point=start, bearing=90).longitude + lat_gain = dis_h.destination(point=start, bearing=0).latitude + return abs(start.latitude - lat_gain), abs(start.longitude - lon_gain) + + +def round_coords(point, precision, _round=round): + return _round(point[0], precision), _round(point[1], precision) + + +def get_bootstrap_points(bounds): + coords = [] + if bounds.multi: + for b in bounds.polygons: + coords.extend(get_bootstrap_points(b)) + return coords + lat_gain, lon_gain = get_gains(conf.BOOTSTRAP_RADIUS) + west, east = bounds.west, bounds.east + bound = bool(bounds) + for map_row, lat in enumerate( + float_range(bounds.south, bounds.north, lat_gain) + ): + row_start_lon = west + if map_row % 2 != 0: + row_start_lon -= 0.5 * lon_gain + for lon in float_range(row_start_lon, east, lon_gain): + point = lat, lon + if not bound or point in bounds: + coords.append(point) + shuffle(coords) + return coords def get_device_info(account): @@ -50,24 +131,12 @@ def get_device_info(account): 'device': 'iPhone', 'manufacturer': 'Apple'} try: - device_info['product'] = 'iOS' if account['iOS'].startswith('1') else 'iPhone OS' + if account['iOS'].startswith('1'): + device_info['product'] = 'iOS' + else: + device_info['product'] = 'iPhone OS' device_info['hardware'] = account['model'] + '\x00' - iphones = {'iPhone5,1': 'N41AP', - 'iPhone5,2': 'N42AP', - 'iPhone5,3': 'N48AP', - 'iPhone5,4': 'N49AP', - 'iPhone6,1': 'N51AP', - 'iPhone6,2': 'N53AP', - 'iPhone7,1': 'N56AP', - 'iPhone7,2': 'N61AP', - 'iPhone8,1': 'N71AP', - 'iPhone8,2': 'N66AP', - 'iPhone8,4': 'N69AP', - 'iPhone9,1': 'D10AP', - 'iPhone9,2': 'D11AP', - 'iPhone9,3': 'D101AP', - 'iPhone9,4': 'D111AP'} - device_info['model'] = iphones[account['model']] + '\x00' + device_info['model'] = IPHONES[account['model']] + '\x00' except (KeyError, 
AttributeError): account = generate_device_info(account) return get_device_info(account) @@ -81,7 +150,8 @@ def generate_device_info(account): ios9 = ('9.0', '9.0.1', '9.0.2', '9.1', '9.2', '9.2.1', '9.3', '9.3.1', '9.3.2', '9.3.3', '9.3.4', '9.3.5') ios10 = ('10.0', '10.0.1', '10.0.2', '10.0.3', '10.1', '10.1.1', '10.2', '10.2.1', '10.3', '10.3.1', '10.3.2') - account['model'] = choice(('iPhone5,1', 'iPhone5,2', 'iPhone5,3', 'iPhone5,4', 'iPhone6,1', 'iPhone6,2', 'iPhone7,1', 'iPhone7,2', 'iPhone8,1', 'iPhone8,2', 'iPhone8,4', 'iPhone9,1', 'iPhone9,2', 'iPhone9,3', 'iPhone9,4')) + devices = tuple(IPHONES.keys()) + account['model'] = choice(devices) account['id'] = uuid4().hex @@ -242,3 +312,12 @@ def load_accounts_csv(): for row in reader: accounts[row['username']] = dict(row) return accounts + + +def randomize_point(point, amount=0.0003, randomize=uniform): + '''Randomize point, by up to ~47 meters by default.''' + lat, lon = point + return ( + randomize(lat - amount, lat + amount), + randomize(lon - amount, lon + amount) + ) diff --git a/monocle/web_utils.py b/monocle/web_utils.py index 7dca0d640..684b548d2 100644 --- a/monocle/web_utils.py +++ b/monocle/web_utils.py @@ -3,11 +3,22 @@ from multiprocessing.managers import BaseManager, RemoteError from time import time -from monocle import spawnid_to_coords, sanitized as conf +from monocle import sanitized as conf from monocle.db import get_forts, Pokestop, session_scope, Sighting, Spawnpoint -from monocle.utils import get_address -from monocle.names import POKEMON - +from monocle.utils import Units, get_address +from monocle.names import DAMAGE, MOVES, POKEMON + +if conf.MAP_WORKERS: + try: + UNIT = getattr(Units, conf.SPEED_UNIT.lower()) + if UNIT is Units.miles: + UNIT_STRING = "MPH" + elif UNIT is Units.kilometers: + UNIT_STRING = "KMH" + elif UNIT is Units.meters: + UNIT_STRING = "m/h" + except AttributeError: + UNIT_STRING = "MPH" def get_args(): parser = ArgumentParser() @@ -62,22 +73,56 @@ def data(self): 
def get_worker_markers(workers): return [{ - 'lat': location[0], - 'lon': location[1], + 'lat': lat, + 'lon': lon, 'worker_no': worker_no, 'time': datetime.fromtimestamp(timestamp).strftime('%I:%M:%S %p'), - 'speed': '{:.1f}m/s'.format(speed), + 'speed': '{:.1f}{}'.format(speed, UNIT_STRING), 'total_seen': total_seen, 'visits': visits, 'seen_here': seen_here - } for worker_no, (location, timestamp, speed, total_seen, visits, seen_here) in workers.data] + } for worker_no, ((lat, lon), timestamp, speed, total_seen, visits, seen_here) in workers.data] + + +def sighting_to_marker(pokemon, names=POKEMON, moves=MOVES, damage=DAMAGE): + pokemon_id = pokemon.pokemon_id + marker = { + 'id': 'pokemon-' + str(pokemon.id), + 'trash': pokemon_id in conf.TRASH_IDS, + 'name': names[pokemon_id], + 'pokemon_id': pokemon_id, + 'lat': pokemon.lat, + 'lon': pokemon.lon, + 'expires_at': pokemon.expire_timestamp, + } + move1 = pokemon.move_1 + if pokemon.move_1: + move2 = pokemon.move_2 + marker['atk'] = pokemon.atk_iv + marker['def'] = pokemon.def_iv + marker['sta'] = pokemon.sta_iv + marker['move1'] = moves[move1] + marker['move2'] = moves[move2] + marker['damage1'] = damage[move1] + marker['damage2'] = damage[move2] + return marker + + +def get_pokemarkers(after_id=0): + with session_scope() as session: + pokemons = session.query(Sighting) \ + .filter(Sighting.expire_timestamp > time(), + Sighting.id > after_id) + if conf.MAP_FILTER_IDS: + pokemons = pokemons.filter(~Sighting.pokemon_id.in_(conf.MAP_FILTER_IDS)) + return tuple(map(sighting_to_marker, pokemons)) def get_gym_markers(names=POKEMON): with session_scope() as session: forts = get_forts(session) return [{ - 'id': 'fort-' + repr(fort['fort_id']), + 'id': 'fort-' + str(fort['fort_id']), 'sighting_id': fort['id'], 'prestige': fort['prestige'], 'pokemon_id': fort['guard_pokemon_id'], @@ -88,6 +133,52 @@ def get_gym_markers(names=POKEMON): } for fort in forts] +def get_spawnpoint_markers(): + with session_scope() as session: + 
spawns = session.query(Spawnpoint) + return [{ + 'spawn_id': spawn.spawn_id, + 'despawn_time': spawn.despawn_time, + 'lat': spawn.lat, + 'lon': spawn.lon, + 'duration': spawn.duration + } for spawn in spawns] + +if conf.BOUNDARIES: + from shapely.geometry import mapping + + def get_scan_coords(): + coordinates = mapping(conf.BOUNDARIES)['coordinates'] + coords = coordinates[0] + markers = [{ + 'type': 'scanarea', + 'coords': coords + }] + for blacklist in coordinates[1:]: + markers.append({ + 'type': 'scanblacklist', + 'coords': blacklist + }) + return markers +else: + def get_scan_coords(): + return ({ + 'type': 'scanarea', + 'coords': (conf.MAP_START, (conf.MAP_START[0], conf.MAP_END[1]), + conf.MAP_END, (conf.MAP_END[0], conf.MAP_START[1]), conf.MAP_START) + },) + + +def get_pokestop_markers(): + with session_scope() as session: + pokestops = session.query(Pokestop) + return [{ + 'external_id': pokestop.external_id, + 'lat': pokestop.lat, + 'lon': pokestop.lon + } for pokestop in pokestops] + + def sighting_to_report_marker(sighting): return { 'icon': 'static/monocle-icons/icons/{}.png'.format(sighting.pokemon_id), diff --git a/monocle/worker.py b/monocle/worker.py index a0413441e..971e6696d 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -9,26 +9,39 @@ from aiopogo import PGoApi, HashServer, json_loads, exceptions as ex from aiopogo.auth_ptc import AuthPtc from cyrandom import choice, randint, uniform -from pogeo import Location -from pogeo.utils import location_to_cellid, location_to_token +from pogeo import get_distance -from .altitudes import load_alts, set_altitude from .db import FORT_CACHE, MYSTERY_CACHE, SIGHTING_CACHE -from .utils import load_pickle, get_device_info, get_start_coords +from .utils import round_coords, load_pickle, get_device_info, get_start_coords, Units, randomize_point from .shared import get_logger, LOOP, SessionManager, run_threaded, ACCOUNTS -from . import avatar, bounds, db_proc, spawns, sanitized as conf +from . 
import altitudes, avatar, bounds, db_proc, spawns, sanitized as conf if conf.NOTIFY: from .notification import Notifier if conf.CACHE_CELLS: - from pogeo import CellCache - CELL_CACHE = load_pickle('cellcache') or CellCache() - get_cell_ids = CELL_CACHE.get_cell_ids + from array import typecodes + if 'Q' in typecodes: + from pogeo import get_cell_ids_compact as _pogeo_cell_ids + else: + from pogeo import get_cell_ids as _pogeo_cell_ids else: - from pogeo import get_cell_ids + from pogeo import get_cell_ids as _pogeo_cell_ids + -load_alts() +_unit = getattr(Units, conf.SPEED_UNIT.lower()) +if conf.SPIN_POKESTOPS: + if _unit is Units.miles: + SPINNING_SPEED_LIMIT = 21 + UNIT_STRING = "MPH" + elif _unit is Units.kilometers: + SPINNING_SPEED_LIMIT = 34 + UNIT_STRING = "KMH" + elif _unit is Units.meters: + SPINNING_SPEED_LIMIT = 34000 + UNIT_STRING = "m/h" +UNIT = _unit.value +del _unit class Worker: @@ -36,8 +49,22 @@ class Worker: download_hash = '' scan_delay = conf.SCAN_DELAY if conf.SCAN_DELAY >= 10 else 10 - seen = 0 - captchas = 0 + g = {'seen': 0, 'captchas': 0} + + if conf.CACHE_CELLS: + cells = load_pickle('cells') or {} + + @classmethod + def get_cell_ids(cls, point): + rounded = round_coords(point, 4) + try: + return cls.cells[rounded] + except KeyError: + cells = _pogeo_cell_ids(rounded) + cls.cells[rounded] = cells + return cells + else: + get_cell_ids = _pogeo_cell_ids login_semaphore = Semaphore(conf.SIMULTANEOUS_LOGINS, loop=LOOP) sim_semaphore = Semaphore(conf.SIMULTANEOUS_SIMULATION, loop=LOOP) @@ -66,9 +93,10 @@ def __init__(self, worker_no): raise ValueError("You don't have enough accounts for the number of workers specified in GRID.") from e self.username = self.account['username'] try: - self.location = self.account['loc'] - except (KeyError, TypeError): + self.location = self.account['location'][:2] + except KeyError: self.location = get_start_coords(worker_no) + self.altitude = None # last time of any request self.last_request = 
self.account.get('time', 0) # last time of a request that requires user interaction in the game @@ -99,14 +127,13 @@ def __init__(self, worker_no): self.pokestops = conf.SPIN_POKESTOPS self.next_spin = 0 self.handle = HandleStub() - self.start_time = monotonic() def initialize_api(self): device_info = get_device_info(self.account) self.empty_visits = 0 self.api = PGoApi(device_info=device_info) - self.api.position = self.location + self.api.set_position(*self.location, self.altitude) if self.proxies: self.api.proxy = next(self.proxies) try: @@ -158,7 +185,7 @@ async def login(self, reauth=False): raise err self.error_code = '°' - version = 6304 + version = 6301 async with self.sim_semaphore: self.error_code = 'APP SIMULATION' if conf.APP_SIMULATION: @@ -481,7 +508,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False for attempt in range(-1, conf.MAX_RETRIES): try: responses = await request.call() - self.location.update_time() + self.last_request = time() err = None break except (ex.NotLoggedInException, ex.AuthException) as e: @@ -525,7 +552,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False except ex.BadRPCException: raise except ex.InvalidRPCException as e: - self.location.update_time() + self.last_request = time() if not isinstance(e, type(err)): err = e self.log.warning('{}', e) @@ -544,7 +571,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False self.log.error('{}', e) await sleep(5, loop=LOOP) except (ex.MalformedResponseException, ex.UnexpectedResponseException) as e: - self.location.update_time() + self.last_request = time() if not isinstance(e, type(err)): self.log.warning('{}', e) self.error_code = 'MALFORMED RESPONSE' @@ -554,7 +581,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False if action: # pad for time that action would require - self.last_action = self.location.time + action + self.last_action = self.last_request + 
action try: delta = responses['GET_INVENTORY'].inventory_delta @@ -582,7 +609,7 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False try: challenge_url = responses['CHECK_CHALLENGE'].challenge_url if challenge_url != ' ': - Worker.captchas += 1 + self.g['captchas'] += 1 if conf.CAPTCHA_KEY: self.log.warning('{} has encountered a CAPTCHA, trying to solve', self.username) await self.handle_captcha(challenge_url) @@ -592,30 +619,43 @@ async def call(self, request, chain=True, stamp=True, buddy=True, settings=False pass return responses + def travel_speed(self, point): + '''Fast calculation of travel speed to point''' + time_diff = max(time() - self.last_request, self.scan_delay) + distance = get_distance(self.location, point, UNIT) + # conversion from seconds to hours + speed = (distance / time_diff) * 3600 + return speed + async def bootstrap_visit(self, point): for _ in range(3): - if await self.visit_point(point, bootstrap=True): + if await self.visit(point, bootstrap=True): return True self.error_code = '∞' - self.location.jitter(0.00005, 0.00005, 0.5) + self.simulate_jitter(0.00005) return False - async def visit_point(self, point, spawn_id=None, bootstrap=False): - """Wrapper for visit that sets location and logs in if necessary + async def visit(self, point, spawn_id=None, bootstrap=False): + """Wrapper for self.visit_point - runs it a few times before giving up + + Also is capable of restarting in case an error occurs. 
""" try: - set_altitude(point) + try: + self.altitude = altitudes.get(point) + except KeyError: + self.altitude = await altitudes.fetch(point) self.location = point - self.api.position = self.location + self.api.set_position(*self.location, self.altitude) if not self.authenticated: await self.login() - return await self.visit(spawn_id, bootstrap) + return await self.visit_point(point, spawn_id, bootstrap) except ex.NotLoggedInException: self.error_code = 'NOT AUTHENTICATED' await sleep(1, loop=LOOP) if not await self.login(reauth=True): await self.swap_account(reason='reauth failed') - return await self.visit(spawn_id, bootstrap) + return await self.visit(point, spawn_id, bootstrap) except ex.AuthException as e: self.log.warning('Auth error on {}: {}', self.username, e) self.error_code = 'NOT AUTHENTICATED' @@ -623,7 +663,7 @@ async def visit_point(self, point, spawn_id=None, bootstrap=False): await self.swap_account(reason='login failed') except CaptchaException: self.error_code = 'CAPTCHA' - Worker.captchas += 1 + self.g['captchas'] += 1 await sleep(1, loop=LOOP) await self.bench_account() except CaptchaSolveException: @@ -679,7 +719,7 @@ async def visit_point(self, point, spawn_id=None, bootstrap=False): self.error_code = 'MALFORMED RESPONSE' except EmptyGMOException as e: self.error_code = '0' - self.log.warning('Empty GetMapObjects response for {}. Speed: {:.2f}m/s', self.username, self.speed) + self.log.warning('Empty GetMapObjects response for {}. 
Speed: {:.2f}', self.username, self.speed) except ex.HashServerException as e: self.log.warning('{}', e) self.error_code = 'HASHING ERROR' @@ -693,34 +733,38 @@ async def visit_point(self, point, spawn_id=None, bootstrap=False): self.error_code = 'EXCEPTION' return False - async def visit(self, spawn_id, bootstrap, - encounter_conf=conf.ENCOUNTER, notify_conf=conf.NOTIFY): + async def visit_point(self, point, spawn_id, bootstrap, + encounter_conf=conf.ENCOUNTER, notify_conf=conf.NOTIFY, + more_points=conf.MORE_POINTS): self.handle.cancel() self.error_code = '∞' if bootstrap else '!' - self.log.info('Visiting {0[0]:.4f}, {0[1]:.4f}', self.location) + self.log.info('Visiting {0[0]:.4f},{0[1]:.4f}', point) + start = time() - cell_ids = get_cell_ids(self.location) + cell_ids = self.get_cell_ids(point) + since_timestamp_ms = (0,) * len(cell_ids) request = self.api.create_request() request.get_map_objects(cell_id=cell_ids, - since_timestamp_ms=(0,) * len(cell_ids), - latitude=self.location[0], - longitude=self.location[1]) + since_timestamp_ms=since_timestamp_ms, + latitude=point[0], + longitude=point[1]) diff = self.last_gmo + self.scan_delay - time() if diff > 0: await sleep(diff, loop=LOOP) responses = await self.call(request) - self.last_gmo = self.location.time + self.last_gmo = self.last_request try: map_objects = responses['GET_MAP_OBJECTS'] if map_objects.status != 1: - error = 'GetMapObjects code {} for {}. Speed: {:.2f}m/s'.format(map_objects.status, self.username, self.speed) + error = 'GetMapObjects code for {}. 
Speed: {:.2f}'.format(self.username, self.speed) self.empty_visits += 1 if self.empty_visits > 3: - await self.swap_account('{} empty visits'.format(self.empty_visits)) + reason = '{} empty visits'.format(self.empty_visits) + await self.swap_account(reason) raise ex.UnexpectedResponseException(error) except KeyError: await self.random_sleep(.5, 1) @@ -729,6 +773,7 @@ async def visit(self, spawn_id, bootstrap, pokemon_seen = 0 forts_seen = 0 + points_seen = 0 seen_target = not spawn_id if conf.ITEM_LIMITS and self.bag_items >= self.item_capacity: @@ -779,7 +824,7 @@ async def visit(self, spawn_id, bootstrap, db_proc.add(norm) if (self.pokestops and self.bag_items < self.item_capacity - and monotonic() > self.next_spin + and time() > self.next_spin and (not conf.SMART_THROTTLE or self.smart_throttle(2))): cooldown = fort.cooldown_complete_timestamp_ms @@ -791,6 +836,17 @@ async def visit(self, spawn_id, bootstrap, elif fort not in FORT_CACHE: db_proc.add(self.normalize_gym(fort)) + if more_points: + try: + for p in map_cell.spawn_points: + points_seen += 1 + p = p.latitude, p.longitude + if spawns.have_point(p) or p not in bounds: + continue + spawns.cell_points.add(p) + except KeyError: + pass + if spawn_id: db_proc.add({ 'type': 'target', @@ -804,12 +860,12 @@ async def visit(self, spawn_id, bootstrap, if pokemon_seen > 0: self.error_code = ':' self.total_seen += pokemon_seen - Worker.seen += pokemon_seen + self.g['seen'] += pokemon_seen self.empty_visits = 0 else: self.empty_visits += 1 if forts_seen == 0: - self.log.warning('Nothing seen by {}. Speed: {:.2f}m/s', self.username, self.speed) + self.log.warning('Nothing seen by {}. 
Speed: {:.2f}', self.username, self.speed) self.error_code = '0 SEEN' else: self.error_code = ',' @@ -819,16 +875,18 @@ async def visit(self, spawn_id, bootstrap, self.visits += 1 if conf.MAP_WORKERS: - self.worker_dict[self.worker_no] = ( - self.location, self.location.time,self.speed, self.total_seen, - self.visits, pokemon_seen) + self.worker_dict.update([(self.worker_no, + (point, start, self.speed, self.total_seen, + self.visits, pokemon_seen))]) self.log.info( 'Point processed, {} Pokemon and {} forts seen!', - pokemon_seen, forts_seen) + pokemon_seen, + forts_seen, + ) self.update_accounts_dict() self.handle = LOOP.call_later(60, self.unset_code) - return pokemon_seen + forts_seen + return pokemon_seen + forts_seen + points_seen def smart_throttle(self, requests=1): try: @@ -844,30 +902,30 @@ def smart_throttle(self, requests=1): async def spin_pokestop(self, pokestop): self.error_code = '$' - pokestop_location = Location(pokestop.latitude, pokestop.longitude) - distance = self.location.distance(pokestop_location) - # permitted interaction distance - 2 (for some jitter/calculation leeway) + pokestop_location = pokestop.latitude, pokestop.longitude + distance = get_distance(self.location, pokestop_location) + # permitted interaction distance - 4 (for some jitter leeway) # estimation of spinning speed limit - if distance > 38.0 or self.speed > 8.611: + if distance > 36 or self.speed > SPINNING_SPEED_LIMIT: self.error_code = '!' 
return False # randomize location up to ~1.5 meters - self.location.jitter(0.00001, 0.00001, 0.25) + self.simulate_jitter(amount=0.00001) request = self.api.create_request() - request.fort_details(fort_id=pokestop.id, - latitude=pokestop_location[0], - longitude=pokestop_location[1]) + request.fort_details(fort_id = pokestop.id, + latitude = pokestop_location[0], + longitude = pokestop_location[1]) responses = await self.call(request, action=1.2) name = responses['FORT_DETAILS'].name request = self.api.create_request() - request.fort_search(fort_id=pokestop.id, - player_latitude=self.location[0], - player_longitude=self.location[1], - fort_latitude=pokestop_location[0], - fort_longitude=pokestop_location[1]) + request.fort_search(fort_id = pokestop.id, + player_latitude = self.location[0], + player_longitude = self.location[1], + fort_latitude = pokestop_location[0], + fort_longitude = pokestop_location[1]) responses = await self.call(request, action=2) try: @@ -880,8 +938,8 @@ async def spin_pokestop(self, pokestop): if result == 1: self.log.info('Spun {}.', name) elif result == 2: - self.log.info('The server said {} was out of spinning range. {:.1f}m {:.1f}m/s', - name, distance, self.speed) + self.log.info('The server said {} was out of spinning range. {:.1f}m {:.1f}{}', + name, distance, self.speed, UNIT_STRING) elif result == 3: self.log.warning('{} was in the cooldown period.', name) elif result == 4: @@ -894,22 +952,26 @@ async def spin_pokestop(self, pokestop): else: self.log.warning('Failed spinning {}: {}', name, result) - self.next_spin = monotonic() + conf.SPIN_COOLDOWN + self.next_spin = time() + conf.SPIN_COOLDOWN self.error_code = '!' 
async def encounter(self, pokemon, spawn_id): - distance_to_pokemon = self.location.distance(Location(pokemon['lat'], pokemon['lon'])) + distance_to_pokemon = get_distance(self.location, (pokemon['lat'], pokemon['lon'])) self.error_code = '~' if distance_to_pokemon > 48: percent = 1 - (47 / distance_to_pokemon) - self.location[0] -= (self.location[0] - pokemon['lat']) * percent - self.location[1] -= (self.location[1] - pokemon['lon']) * percent - self.jitter(0.000001, 0.000001, 1) + lat_change = (self.location[0] - pokemon['lat']) * percent + lon_change = (self.location[1] - pokemon['lon']) * percent + self.location = ( + self.location[0] - lat_change, + self.location[1] - lon_change) + self.altitude = uniform(self.altitude - 2, self.altitude + 2) + self.api.set_position(*self.location, self.altitude) delay_required = min((distance_to_pokemon * percent) / 8, 1.1) else: - self.jitter(0.00001, 0.00001, .3) + self.simulate_jitter() delay_required = 1.1 await self.random_sleep(delay_required, delay_required + 1.5) @@ -941,12 +1003,12 @@ async def clean_bag(self): rec_items = {} limits = conf.ITEM_LIMITS for item, count in self.items.items(): - try: + if item in limits and count > limits[item]: discard = count - limits[item] - if discard > 0: - rec_items[item] = discard if discard <= 50 else randint(50, discard) - except KeyError: - pass + if discard > 50: + rec_items[item] = randint(50, discard) + else: + rec_items[item] = discard removed = 0 for item, count in rec_items.items(): @@ -955,10 +1017,10 @@ async def clean_bag(self): responses = await self.call(request, action=2) try: - if responses['RECYCLE_INVENTORY_ITEM'].result == 1: - removed += count + if responses['RECYCLE_INVENTORY_ITEM'].result != 1: + self.log.warning("Failed to remove item {}", item) else: - self.log.warning("Failed to remove item {}, code: {}", item, result) + removed += count except KeyError: self.log.warning("Failed to remove item {}", item) self.log.info("Removed {} items", removed) @@ 
-1011,8 +1073,6 @@ async def handle_captcha(self, challenge_url): } async with session.post('http://2captcha.com/in.php', params=params) as resp: response = await resp.json(loads=json_loads) - except KeyError: - self.log.error('Challenge URL not found in response.') except CancelledError: raise except Exception as e: @@ -1061,8 +1121,14 @@ async def handle_captcha(self, challenge_url): self.update_accounts_dict() self.log.warning("Successfully solved CAPTCHA") + def simulate_jitter(self, amount=0.00002): + '''Slightly randomize location, by up to ~3 meters by default.''' + self.location = randomize_point(self.location) + self.altitude = uniform(self.altitude - 1, self.altitude + 1) + self.api.set_position(*self.location, self.altitude) + def update_accounts_dict(self): - self.account['loc'] = self.location + self.account['location'] = self.location self.account['time'] = self.last_request self.account['inventory_timestamp'] = self.inventory_timestamp if self.player_level: @@ -1123,12 +1189,14 @@ async def new_account(self): self.account = await run_threaded(self.extra_queue.get) self.username = self.account['username'] try: - self.location = self.account['loc'] + self.location = self.account['location'][:2] except KeyError: self.location = get_start_coords(self.worker_no) self.inventory_timestamp = self.account.get('inventory_timestamp', 0) if self.items else 0 self.player_level = self.account.get('level') - self.last_action = self.last_gmo = self.location.time + self.last_request = self.account.get('time', 0) + self.last_action = self.last_request + self.last_gmo = self.last_request try: self.items = self.account['items'] self.bag_items = sum(self.items.values()) @@ -1140,7 +1208,6 @@ async def new_account(self): self.unused_incubators = deque() self.initialize_api() self.error_code = None - self.start_time = monotonic() def unset_code(self): self.error_code = None @@ -1175,15 +1242,16 @@ def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): return norm 
@staticmethod - def normalize_lured(raw, now, sid=location_to_cellid if conf.SPAWN_ID_INT else location_to_token): + def normalize_lured(raw, now): lure = raw.lure_info - loc = Location(raw.latitude, raw.longitude) return { 'type': 'pokemon', 'encounter_id': lure.encounter_id, 'pokemon_id': lure.active_pokemon_id, 'expire_timestamp': lure.lure_expires_timestamp_ms // 1000, - 'spawn_id': sid(loc, 25), + 'lat': raw.latitude, + 'lon': raw.longitude, + 'spawn_id': 0 if conf.SPAWN_ID_INT else 'LURED', 'time_till_hidden': (lure.lure_expires_timestamp_ms - now) / 1000, 'inferred': 'pokestop' } @@ -1215,6 +1283,10 @@ async def random_sleep(minimum=10.1, maximum=14, loop=LOOP): """Sleeps for a bit""" await sleep(uniform(minimum, maximum), loop=loop) + @property + def start_time(self): + return self.api.start_time + @property def status(self): """Returns status message to be displayed in status screen""" diff --git a/optional-requirements.txt b/optional-requirements.txt index 1759cf9bb..1cdbc93e8 100644 --- a/optional-requirements.txt +++ b/optional-requirements.txt @@ -1,5 +1,7 @@ asyncpushbullet>=0.12 peony-twitter>=0.9.3 +gpsoauth>=0.4.0 +shapely>=1.3.0 selenium>=3.0 uvloop>=0.7.0 gpsoauth>=0.4.0 @@ -8,6 +10,6 @@ cchardet>=2.1.0 aiodns>=1.1.0 aiosocks>=0.2.2 ujson>=1.35 -sanic>=0.5 +sanic>=0.3 asyncpg>=0.8 mysqlclient>=1.3 diff --git a/requirements.txt b/requirements.txt index cf7b10960..61c26b85c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,9 @@ +geopy>=1.11.0 protobuf>=3.0.0 +flask>=0.11.1 sqlalchemy>=1.1.0 -aiopogo>=2.1.0b0 +aiopogo==2.0.* +polyline>=1.3.1 aiohttp>=2.0.7 -pogeo>=0.4b0 -cyrandom>=0.3.0 -flask>=0.11.1 +pogeo==0.3.* +cyrandom>=0.1.2 diff --git a/scan.py b/scan.py index 109b76730..433929a3c 100755 --- a/scan.py +++ b/scan.py @@ -28,7 +28,7 @@ from monocle.worker import Worker from monocle.overseer import Overseer from monocle.db import FORT_CACHE -from monocle import db_proc, spawns +from monocle import altitudes, db_proc, spawns class 
AccountManager(BaseManager): @@ -151,9 +151,9 @@ def cleanup(overseer, manager): print('Dumping pickles...') dump_pickle('accounts', ACCOUNTS) FORT_CACHE.pickle() + altitudes.pickle() if conf.CACHE_CELLS: - from monocle.worker import CELL_CACHE - dump_pickle('cellcache', CELL_CACHE) + dump_pickle('cells', Worker.cells) spawns.pickle() while not db_proc.queue.empty(): diff --git a/scripts/pickle_landmarks.example.py b/scripts/pickle_landmarks.example.py new file mode 100644 index 000000000..96cc8cf59 --- /dev/null +++ b/scripts/pickle_landmarks.example.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +import sys +import pickle + +from pathlib import Path + +monocle_dir = Path(__file__).resolve().parents[1] +sys.path.append(str(monocle_dir)) + +from monocle.landmarks import Landmarks + +pickle_dir = monocle_dir / 'pickles' +pickle_dir.mkdir(exist_ok=True) + +LANDMARKS = Landmarks(query_suffix='Salt Lake City') + +# replace the following with your own landmarks +LANDMARKS.add('Rice Eccles Stadium', hashtags={'Utes'}) +LANDMARKS.add('the Salt Lake Temple', hashtags={'TempleSquare'}) +LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags={'CityCreek'}) +LANDMARKS.add('the State Capitol', query='Utah State Capitol Building') +LANDMARKS.add('the University of Utah', hashtags={'Utes'}, phrase='at', is_area=True) +LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True) + +pickle_path = pickle_dir / 'landmarks.pickle' +with pickle_path.open('wb') as f: + pickle.dump(LANDMARKS, f, pickle.HIGHEST_PROTOCOL) + + +print('\033[94mDone. 
Now add the following to your config:\n\033[92m', + 'import pickle', + "with open('pickles/landmarks.pickle', 'rb') as f:", + ' LANDMARKS = pickle.load(f)', + sep='\n') diff --git a/scripts/test_notifications.py b/scripts/test_notifications.py index 965dbb6a9..ef7476eba 100755 --- a/scripts/test_notifications.py +++ b/scripts/test_notifications.py @@ -53,7 +53,7 @@ if args.id == 0: names.POKEMON[0] = 'Test' else: - pokemon_id = randint(1, 251) + pokemon_id = randint(1, 252) if not args.unmodified: conf.ALWAYS_NOTIFY_IDS = {pokemon_id} diff --git a/setup.py b/setup.py index d9f23a224..d5660f9fc 100755 --- a/setup.py +++ b/setup.py @@ -1,9 +1,13 @@ #!/usr/bin/env python3 from setuptools import setup +from os.path import exists +from shutil import copyfile from monocle import __version__ as version, __title__ as name +if not exists('monocle/config.py'): + copyfile('config.example.py', 'monocle/config.py') setup( name=name, @@ -13,24 +17,29 @@ zip_safe=False, scripts=('scan.py', 'web.py', 'web_sanic.py', 'gyms.py', 'solve_captchas.py'), install_requires=[ + 'geopy>=1.11.0', 'protobuf>=3.0.0', + 'flask>=0.11.1', + 'gpsoauth>=0.4.0', + 'werkzeug>=0.11.15', 'sqlalchemy>=1.1.0', - 'aiopogo>=2.1.0b0', - 'aiohttp>=2.0.7', - 'pogeo>=0.4b0', - 'cyrandom>=0.3.0', - 'flask>=0.11.1' + 'aiopogo>=1.8.0', + 'polyline>=1.3.1', + 'aiohttp>=2.0.7,<2.1', + 'pogeo==0.3.*', + 'cyrandom>=0.1.2' ], extras_require={ 'twitter': ['peony-twitter>=0.9.3'], 'pushbullet': ['asyncpushbullet>=0.12'], + 'landmarks': ['shapely>=1.3.0'], + 'boundaries': ['shapely>=1.3.0'], 'manual_captcha': ['selenium>=3.0'], 'performance': ['uvloop>=0.7.0', 'cchardet>=1.1.0', 'aiodns>=1.1.0', 'ujson>=1.35'], 'mysql': ['mysqlclient>=1.3'], 'postgres': ['psycopg2>=2.6'], 'images': ['pycairo>=1.10.0'], 'socks': ['aiosocks>=0.2.2'], - 'sanic': ['sanic>=0.5', 'asyncpg>=0.8', 'ujson>=1.35'], - 'google_accounts': ['gpsoauth>=0.4.0'] + 'sanic': ['sanic>=0.4', 'asyncpg>=0.8', 'ujson>=1.35'] } ) diff --git a/solve_captchas.py 
b/solve_captchas.py index d73dd742d..bc9864bc9 100755 --- a/solve_captchas.py +++ b/solve_captchas.py @@ -11,9 +11,8 @@ from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.support.ui import WebDriverWait -from monocle import sanitized as conf -from monocle.altitudes import load_alts, set_altitude -from monocle.utils import get_device_info, get_address +from monocle import altitudes, sanitized as conf +from monocle.utils import get_device_info, get_address, randomize_point from monocle.bounds import center @@ -46,7 +45,6 @@ async def solve_captcha(url, api, driver, timestamp): async def main(): try: - load_alts() class AccountManager(BaseManager): pass AccountManager.register('captcha_queue') AccountManager.register('extra_queue') @@ -63,18 +61,22 @@ class AccountManager(BaseManager): pass while not captcha_queue.empty(): account = captcha_queue.get() username = account.get('username') - try: - location = account['loc'] - except Exception: - location = center - location.jitter(0.0001, 0.0001, 2.0) + location = account.get('location') + if location and location != (0,0,0): + lat = location[0] + lon = location[1] + else: + lat, lon = randomize_point(center, 0.0001) - set_altitude(location) + try: + alt = altitudes.get((lat, lon)) + except KeyError: + alt = await altitudes.fetch((lat, lon)) try: device_info = get_device_info(account) api = PGoApi(device_info=device_info) - api.location = location + api.set_position(lat, lon, alt) authenticated = False try: @@ -109,7 +111,7 @@ class AccountManager(BaseManager): pass challenge_url = responses['CHECK_CHALLENGE'].challenge_url timestamp = responses['GET_INVENTORY'].inventory_delta.new_timestamp_ms - account['location'] = location + account['location'] = lat, lon account['inventory_timestamp'] = timestamp if challenge_url == ' ': account['captcha'] = False diff --git a/web.py b/web.py index 7561cf63b..478f3dfce 100755 --- a/web.py +++ b/web.py @@ -10,12 +10,12 @@ except ImportError: from 
json import dumps -from flask import Flask, jsonify, make_response, Markup, render_template, request -from pogeo.monotools.sightingcache import SightingCache -from pogeo.monotools.spawncache import SpawnCache +from flask import Flask, jsonify, Markup, render_template, request -from monocle import bounds, db, names, sanitized as conf +from monocle import db, sanitized as conf +from monocle.names import POKEMON from monocle.web_utils import * +from monocle.bounds import area, center app = Flask(__name__, template_folder=resource_filename('monocle', 'templates'), static_folder=resource_filename('monocle', 'static')) @@ -52,7 +52,7 @@ def render_map(): template = app.jinja_env.get_template('custom.html' if conf.LOAD_CUSTOM_HTML_FILE else 'newmap.html') return template.render( area_name=conf.AREA_NAME, - map_center=bounds.center, + map_center=center, map_provider_url=conf.MAP_PROVIDER_URL, map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION, social_links=social_links(), @@ -65,7 +65,7 @@ def render_worker_map(): template = app.jinja_env.get_template('workersmap.html') return template.render( area_name=conf.AREA_NAME, - map_center=bounds.center, + map_center=center, map_provider_url=conf.MAP_PROVIDER_URL, map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION, social_links=social_links() @@ -78,22 +78,9 @@ def fullmap(map_html=render_map()): @app.route('/data') -def pokemon_data( - cache=SightingCache(conf, db, names), - _resp=make_response): - try: - compress = 'gzip' in request.headers['Accept-Encoding'].lower() - except KeyError: - compress = False - try: - last_id = int(request.args['last_id']) - except KeyError: - last_id = 0 - response = _resp(cache.get_json(last_id, compress)) - response.mimetype = 'application/json' - if compress: - response.headers['Content-Encoding'] = 'gzip' - return response +def pokemon_data(): + last_id = request.args.get('last_id', 0) + return jsonify(get_pokemarkers(last_id)) @app.route('/gym_data') @@ -102,29 +89,18 @@ def gym_data(): 
@app.route('/spawnpoints') -def spawn_points( - cache=SpawnCache(conf.SPAWN_ID_INT, db), - _resp=make_response): - compress = 'gzip' in request.headers.get('Accept-Encoding', '').lower() - response = _resp(cache.get_json(compress)) - response.mimetype = 'application/json' - if compress: - response.headers['Content-Encoding'] = 'gzip' - return response +def spawn_points(): + return jsonify(get_spawnpoint_markers()) @app.route('/pokestops') -def get_pokestops(_scope=db.session_scope, _stop=db.Pokestop): - with _scope() as session: - pokestops = session.query(_stop.external_id, _stop.lat, _stop.lon) - return jsonify(pokestops.all()) +def get_pokestops(): + return jsonify(get_pokestop_markers()) @app.route('/scan_coords') -def scan_coords(_coords=bounds.json, _resp=make_response): - response = _resp(_coords) - response.mimetype = 'application/json' - return response +def scan_coords(): + return jsonify(get_scan_coords()) if conf.MAP_WORKERS: @@ -143,7 +119,7 @@ def workers_map(map_html=render_worker_map()): @app.route('/report') def report_main(area_name=conf.AREA_NAME, - names=names.POKEMON, + names=POKEMON, key=conf.GOOGLE_MAPS_KEY if conf.REPORT_MAPS else None): with db.session_scope() as session: counts = db.get_sightings_per_pokemon(session) @@ -177,7 +153,7 @@ def report_main(area_name=conf.AREA_NAME, 'maps_data': { 'rare': [sighting_to_report_marker(s) for s in rare_sightings], }, - 'map_center': bounds.center, + 'map_center': center, 'zoom': 13, } icons = { @@ -191,7 +167,7 @@ def report_main(area_name=conf.AREA_NAME, 'report.html', current_date=datetime.now(), area_name=area_name, - area_size=bounds.area, + area_size=area, total_spawn_count=count, spawns_per_hour=count // session_stats['length_hours'], session_start=session_stats['start'], @@ -213,16 +189,16 @@ def report_single(pokemon_id, 'charts_data': { 'hours': db.get_spawns_per_hour(session, pokemon_id), }, - 'map_center': bounds.center, + 'map_center': center, 'zoom': 13, } return render_template( 
'report_single.html', current_date=datetime.now(), area_name=area_name, - area_size=bounds.area, + area_size=area, pokemon_id=pokemon_id, - pokemon_name=names.POKEMON[pokemon_id], + pokemon_name=POKEMON[pokemon_id], total_spawn_count=db.get_total_spawns_count(session, pokemon_id), session_start=session_stats['start'], session_end=session_stats['end'], diff --git a/web_sanic.py b/web_sanic.py index 1572c88a0..fba1c6eac 100755 --- a/web_sanic.py +++ b/web_sanic.py @@ -3,22 +3,20 @@ from pkg_resources import resource_filename from time import time -from asyncpg import create_pool -from jinja2 import Environment, PackageLoader, Markup from sanic import Sanic -from sanic.response import html, HTTPResponse, json -from pogeo.monotools.aiosightingcache import AioSightingCache -from pogeo.monotools.aiospawncache import AioSpawnCache +from sanic.response import html, json +from jinja2 import Environment, PackageLoader, Markup +from asyncpg import create_pool -from monocle import bounds, names, sanitized as conf -from monocle.web_utils import get_worker_markers, Workers, get_args +from monocle import sanitized as conf +from monocle.bounds import center +from monocle.names import DAMAGE, MOVES, POKEMON +from monocle.web_utils import get_scan_coords, get_worker_markers, Workers, get_args env = Environment(loader=PackageLoader('monocle', 'templates')) app = Sanic(__name__) app.static('/static', resource_filename('monocle', 'static')) -_SIGHTINGS = AioSightingCache(conf, names) -_SPAWNS = AioSpawnCache(conf.SPAWN_ID_INT) def social_links(): @@ -52,7 +50,7 @@ def render_map(): template = env.get_template('custom.html' if conf.LOAD_CUSTOM_HTML_FILE else 'newmap.html') return html(template.render( area_name=conf.AREA_NAME, - map_center=bounds.center, + map_center=center, map_provider_url=conf.MAP_PROVIDER_URL, map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION, social_links=social_links(), @@ -65,7 +63,7 @@ def render_worker_map(): template = env.get_template('workersmap.html') 
return html(template.render( area_name=conf.AREA_NAME, - map_center=bounds.center, + map_center=center, map_provider_url=conf.MAP_PROVIDER_URL, map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION, social_links=social_links() @@ -95,21 +93,19 @@ async def workers_map(request, html_map=render_worker_map()): @app.get('/data') -async def pokemon_data(request, _cache=_SIGHTINGS): - try: - compress = 'gzip' in request.headers['Accept-Encoding'].lower() - except KeyError: - compress = False - body = await _cache.get_json(int(request.args.get('last_id', 0)), compress) - return HTTPResponse( - body_bytes=body, - content_type='application/json', - headers={'Content-Encoding': 'gzip'} if compress else None) - +async def pokemon_data(request, _time=time): + last_id = request.args.get('last_id', 0) + async with app.pool.acquire() as conn: + results = await conn.fetch(''' + SELECT id, pokemon_id, expire_timestamp, lat, lon, atk_iv, def_iv, sta_iv, move_1, move_2 + FROM sightings + WHERE expire_timestamp > {} AND id > {} + '''.format(_time(), last_id)) + return json(list(map(sighting_to_marker, results))) @app.get('/gym_data') -async def gym_data(request, names=names.POKEMON, _str=str): +async def gym_data(request, names=POKEMON, _str=str): async with app.pool.acquire() as conn: results = await conn.fetch(''' SELECT @@ -142,35 +138,51 @@ async def gym_data(request, names=names.POKEMON, _str=str): @app.get('/spawnpoints') -async def spawn_points(request, _cache=_SPAWNS): - try: - compress = 'gzip' in request.headers['Accept-Encoding'].lower() - except KeyError: - compress = False - body = await _cache.get_json(compress) - return HTTPResponse( - body_bytes=body, - content_type='application/json', - headers={'Content-Encoding': 'gzip'} if compress else None) +async def spawn_points(request, _dict=dict): + async with app.pool.acquire() as conn: + results = await conn.fetch('SELECT spawn_id, despawn_time, lat, lon, duration FROM spawnpoints') + return json([_dict(x) for x in 
results]) @app.get('/pokestops') -async def get_pokestops(request): +async def get_pokestops(request, _dict=dict): async with app.pool.acquire() as conn: results = await conn.fetch('SELECT external_id, lat, lon FROM pokestops') - return json(results) + return json([_dict(x) for x in results]) @app.get('/scan_coords') -async def scan_coords(request, _response=HTTPResponse(body_bytes=bounds.json, content_type='application/json')): - return _response +async def scan_coords(request): + return json(get_scan_coords()) + + +def sighting_to_marker(pokemon, names=POKEMON, moves=MOVES, damage=DAMAGE, trash=conf.TRASH_IDS, _str=str): + pokemon_id = pokemon['pokemon_id'] + marker = { + 'id': 'pokemon-' + _str(pokemon['id']), + 'trash': pokemon_id in trash, + 'name': names[pokemon_id], + 'pokemon_id': pokemon_id, + 'lat': pokemon['lat'], + 'lon': pokemon['lon'], + 'expires_at': pokemon['expire_timestamp'], + } + move1 = pokemon['move_1'] + if move1: + move2 = pokemon['move_2'] + marker['atk'] = pokemon['atk_iv'] + marker['def'] = pokemon['def_iv'] + marker['sta'] = pokemon['sta_iv'] + marker['move1'] = moves[move1] + marker['move2'] = moves[move2] + marker['damage1'] = damage[move1] + marker['damage2'] = damage[move2] + return marker @app.listener('before_server_start') async def register_db(app, loop): - app.pool = await create_pool(dsn=conf.DB_ENGINE, loop=loop) - _SIGHTINGS.initialize(loop, app.pool) - _SPAWNS.initialize(loop, app.pool) + app.pool = await create_pool(**conf.DB, loop=loop) def main(): From f687ebc5c4b6ef18ffb5ca7d0b154e33744132b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Mon, 12 Jun 2017 08:18:13 +0200 Subject: [PATCH 25/38] 6304 --- monocle/worker.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/monocle/worker.py b/monocle/worker.py index 971e6696d..fd3d2d4bd 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -157,7 +157,7 @@ async def login(self, reauth=False): for attempt in 
range(-1, conf.MAX_RETRIES): try: - self.error_code = '»' + self.error_code = '_' async with self.login_semaphore: self.error_code = 'LOGIN' await self.api.set_authentication( @@ -184,8 +184,8 @@ async def login(self, reauth=False): if err: raise err - self.error_code = '°' - version = 6301 + self.error_code = '-' + version = 6304 async with self.sim_semaphore: self.error_code = 'APP SIMULATION' if conf.APP_SIMULATION: @@ -631,7 +631,7 @@ async def bootstrap_visit(self, point): for _ in range(3): if await self.visit(point, bootstrap=True): return True - self.error_code = '∞' + self.error_code = '?' self.simulate_jitter(0.00005) return False @@ -737,7 +737,7 @@ async def visit_point(self, point, spawn_id, bootstrap, encounter_conf=conf.ENCOUNTER, notify_conf=conf.NOTIFY, more_points=conf.MORE_POINTS): self.handle.cancel() - self.error_code = '∞' if bootstrap else '!' + self.error_code = '?' if bootstrap else '!' self.log.info('Visiting {0[0]:.4f},{0[1]:.4f}', point) start = time() @@ -789,9 +789,10 @@ async def visit_point(self, point, spawn_id, bootstrap, if (normalized not in SIGHTING_CACHE and normalized not in MYSTERY_CACHE): - if (encounter_conf == 'all' - or (encounter_conf == 'some' - and normalized['pokemon_id'] in conf.ENCOUNTER_IDS)): + # if (encounter_conf == 'all' + # or (encounter_conf == 'some' + # and normalized['pokemon_id'] in conf.ENCOUNTER_IDS)): + if self.player_level != None and self.player_level >= 30: try: await self.encounter(normalized, pokemon.spawn_point_id) except CancelledError: From f8e81909bc4c438d5b8a32c9a6aff30dc867177a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Mon, 12 Jun 2017 08:40:13 +0200 Subject: [PATCH 26/38] adding cp --- monocle/db.py | 4 ++++ monocle/worker.py | 1 + 2 files changed, 5 insertions(+) diff --git a/monocle/db.py b/monocle/db.py index f1d1eeb28..0ca9e948d 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -225,6 +225,7 @@ class Sighting(Base): sta_iv = Column(TINY_TYPE) move_1 = 
Column(SmallInteger) move_2 = Column(SmallInteger) + cp = Column(SmallInteger) __table_args__ = ( UniqueConstraint( @@ -253,6 +254,7 @@ class Mystery(Base): sta_iv = Column(TINY_TYPE) move_1 = Column(SmallInteger) move_2 = Column(SmallInteger) + cp = Column(SmallInteger) __table_args__ = ( UniqueConstraint( @@ -355,6 +357,7 @@ def add_sighting(session, pokemon): sta_iv=pokemon.get('individual_stamina'), move_1=pokemon.get('move_1'), move_2=pokemon.get('move_2') + cp=pokemon.get('cp') ) session.add(obj) SIGHTING_CACHE.add(pokemon) @@ -454,6 +457,7 @@ def add_mystery(session, pokemon): sta_iv=pokemon.get('individual_stamina'), move_1=pokemon.get('move_1'), move_2=pokemon.get('move_2') + cp=pokemon.get('cp') ) session.add(obj) MYSTERY_CACHE.add(pokemon) diff --git a/monocle/worker.py b/monocle/worker.py index fd3d2d4bd..030970b6b 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -1223,6 +1223,7 @@ def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): 'type': 'pokemon', 'encounter_id': raw.encounter_id, 'pokemon_id': raw.pokemon_data.pokemon_id, + 'cp': raw.pokemon_data.cp, 'lat': raw.latitude, 'lon': raw.longitude, 'spawn_id': int(raw.spawn_point_id, 16) if spawn_int else raw.spawn_point_id, From 993ca617be4d7a4dae11c8f556c6fe676e79e657 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Mon, 12 Jun 2017 08:42:37 +0200 Subject: [PATCH 27/38] adding cp --- monocle/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monocle/db.py b/monocle/db.py index 0ca9e948d..72f412977 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -356,7 +356,7 @@ def add_sighting(session, pokemon): def_iv=pokemon.get('individual_defense'), sta_iv=pokemon.get('individual_stamina'), move_1=pokemon.get('move_1'), - move_2=pokemon.get('move_2') + move_2=pokemon.get('move_2'), cp=pokemon.get('cp') ) session.add(obj) @@ -456,7 +456,7 @@ def add_mystery(session, pokemon): def_iv=pokemon.get('individual_defense'), 
sta_iv=pokemon.get('individual_stamina'), move_1=pokemon.get('move_1'), - move_2=pokemon.get('move_2') + move_2=pokemon.get('move_2'), cp=pokemon.get('cp') ) session.add(obj) From a1785e2e2b8fac921385e9f62a96a9a02316f8ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Mon, 12 Jun 2017 14:01:57 +0200 Subject: [PATCH 28/38] Correction CP --- monocle/worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monocle/worker.py b/monocle/worker.py index 030970b6b..75f815534 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -994,6 +994,7 @@ async def encounter(self, pokemon, spawn_id): pokemon['individual_stamina'] = pdata.individual_stamina pokemon['height'] = pdata.height_m pokemon['weight'] = pdata.weight_kg + pokemon['cp'] = pdata['cp'] pokemon['gender'] = pdata.pokemon_display.gender except KeyError: self.log.error('Missing encounter response.') @@ -1223,7 +1224,6 @@ def normalize_pokemon(raw, spawn_int=conf.SPAWN_ID_INT): 'type': 'pokemon', 'encounter_id': raw.encounter_id, 'pokemon_id': raw.pokemon_data.pokemon_id, - 'cp': raw.pokemon_data.cp, 'lat': raw.latitude, 'lon': raw.longitude, 'spawn_id': int(raw.spawn_point_id, 16) if spawn_int else raw.spawn_point_id, From eb4c1e8cd8bf3cbff9b168a8fd745bd1b99492a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Mon, 12 Jun 2017 14:06:16 +0200 Subject: [PATCH 29/38] CP --- monocle/worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monocle/worker.py b/monocle/worker.py index 75f815534..eb08d305e 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -994,7 +994,7 @@ async def encounter(self, pokemon, spawn_id): pokemon['individual_stamina'] = pdata.individual_stamina pokemon['height'] = pdata.height_m pokemon['weight'] = pdata.weight_kg - pokemon['cp'] = pdata['cp'] + pokemon['cp'] = pdata.cp pokemon['gender'] = pdata.pokemon_display.gender except KeyError: self.log.error('Missing encounter response.') From 
96d84c0a2c745e9e3f376b074d05b33ae5893e43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Wed, 21 Jun 2017 08:31:13 +0200 Subject: [PATCH 30/38] Adding lure on demand --- monocle/db.py | 30 ++++++++++++++++++++++++++++++ monocle/db_proc.py | 10 +++++++++- monocle/worker.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 67 insertions(+), 1 deletion(-) diff --git a/monocle/db.py b/monocle/db.py index 72f412977..b8e10028b 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -793,3 +793,33 @@ def get_all_spawn_coords(session, pokemon_id=None): if conf.REPORT_SINCE: points = points.filter(Sighting.expire_timestamp > SINCE_TIME) return points.all() + +def has_lure_to_add(session, pokestop_id): + query = session.execute(''' + SELECT + pokestops.id, + pokestops.lat, + pokestops.lon + FROM lureToAdd + INNER JOIN pokestops ON lureToAdd.pokestop_id = pokestops.id + WHERE pokestops.external_id = '{pokestop_id}' + '''.format( + pokestop_id=pokestop_id, + )) + log.warning('{} lure asked on {}.', query.rowcount, pokestop_id) + if query.rowcount == 0: + return False + else: + return True + +def del_lure_to_add(session, pokestop_id): + query = session.execute(''' + DELETE lureToAdd + FROM lureToAdd + INNER JOIN pokestops ON lureToAdd.pokestop_id = pokestops.id + WHERE pokestops.external_id = '{pokestop_id}' + '''.format( + pokestop_id=pokestop_id, + )) + session.commit() + log.warning('Deleting lure demand {}.', pokestop_id) diff --git a/monocle/db_proc.py b/monocle/db_proc.py index 772ad7d8f..0b812757f 100644 --- a/monocle/db_proc.py +++ b/monocle/db_proc.py @@ -26,7 +26,15 @@ def stop(self): self.queue.put({'type': False}) def add(self, obj): - self.queue.put(obj) + self.queue.put(obj) + + def lure_to_add(self, pokestop_id): + session = db.Session() + return db.has_lure_to_add(session, pokestop_id) + + def del_lure_to_add(self, pokestop_id): + session = db.Session() + db.del_lure_to_add(session, pokestop_id) def run(self): session = db.Session() diff 
--git a/monocle/worker.py b/monocle/worker.py index 905215af1..e76e22354 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -823,6 +823,8 @@ async def visit_point(self, point, spawn_id, bootstrap, pokemon_seen += 1 if norm not in SIGHTING_CACHE: db_proc.add(norm) + elif conf.LURE_ON_DEMAND: + await self.add_lure_pokestop(fort) if (self.pokestops and self.bag_items < self.item_capacity and time() > self.next_spin @@ -901,6 +903,32 @@ def smart_throttle(self, requests=1): except (TypeError, KeyError): return False + async def add_lure_pokestop(self, pokestop): + self.error_code = '$' + pokestop_location = pokestop.latitude, pokestop.longitude + distance = get_distance(self.location, pokestop_location) + # permitted interaction distance - 4 (for some jitter leeway) + # estimation of spinning speed limit + if distance > 36 or self.speed > SPINNING_SPEED_LIMIT: + self.error_code = '!' + return False + + # randomize location up to ~1.5 meters + self.simulate_jitter(amount=0.00001) + session = SessionManager.get() + if db_proc.lure_to_add(pokestop.id): + db_proc.del_lure_to_add(pokestop.id) + request = self.api.create_request() + self.log.warning('Request add_fort_modifier ITEM_TROY_DISK {} {} {}', pokestop.id, pokestop_location[0], pokestop_location[1]) + request.add_fort_modifier( + # modifier_type = ITEM_TROY_DISK, + modifier_type = 501, + fort_id = pokestop.id, + player_latitude = self.location[0], + player_longitude = self.location[1], + ) + responses = await self.call(request, action=1.1) + async def spin_pokestop(self, pokestop): self.error_code = '$' pokestop_location = pokestop.latitude, pokestop.longitude From 4ffff07a23724dd030ee9156dc5cf5dbe00f08b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Thu, 22 Jun 2017 08:33:40 +0200 Subject: [PATCH 31/38] LURE_ON_DEMAND default false --- monocle/sanitized.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/monocle/sanitized.py b/monocle/sanitized.py index 
05aff696e..b81cbfc75 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -231,7 +231,8 @@ 'TWITTER_SCREEN_NAME': None, 'TZ_OFFSET': None, 'UVLOOP': True, - 'WEBHOOKS': None + 'WEBHOOKS': None, + 'LURE_ON_DEMAND': False } From 741363a9ea11e1f0b3baf719374d92791f06f183 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Dixneuf?= Date: Thu, 22 Jun 2017 08:35:22 +0200 Subject: [PATCH 32/38] Define LURE_ON_DEMAND --- monocle/sanitized.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/monocle/sanitized.py b/monocle/sanitized.py index b81cbfc75..7767859ea 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -126,7 +126,8 @@ 'TWITTER_SCREEN_NAME': str, 'TZ_OFFSET': Number, 'UVLOOP': bool, - 'WEBHOOKS': set_sequence + 'WEBHOOKS': set_sequence, + 'LURE_ON_DEMAND': bool } _defaults = { From 5476e33e18aa71577e8bc84af44d32411a7ff659 Mon Sep 17 00:00:00 2001 From: Unknown Date: Mon, 26 Jun 2017 10:55:05 +0200 Subject: [PATCH 33/38] Encounter only if player level >= 30 --- monocle/worker.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/monocle/worker.py b/monocle/worker.py index 9931579c9..c0923af19 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -786,10 +786,10 @@ async def visit_point(self, point, spawn_id, bootstrap, if (normalized not in SIGHTING_CACHE and normalized not in MYSTERY_CACHE): - # if (encounter_conf == 'all' - # or (encounter_conf == 'some' - # and normalized['pokemon_id'] in conf.ENCOUNTER_IDS)): - if self.player_level != None and self.player_level >= 30: + if (encounter_conf == 'all' + or (encounter_conf == 'some' + and normalized['pokemon_id'] in conf.ENCOUNTER_IDS)) + and self.player_level != None and self.player_level >= 30: try: await self.encounter(normalized, pokemon.spawn_point_id) except CancelledError: From 677a9ab7b734ed937adcb9d9bab2e65bb566fc19 Mon Sep 17 00:00:00 2001 From: Unknown Date: Mon, 26 Jun 2017 10:15:46 +0200 Subject: [PATCH 34/38] Removing cp from 
this branch --- monocle/db.py | 8 ++------ monocle/worker.py | 1 - 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/monocle/db.py b/monocle/db.py index b8e10028b..fb38e061d 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -225,7 +225,6 @@ class Sighting(Base): sta_iv = Column(TINY_TYPE) move_1 = Column(SmallInteger) move_2 = Column(SmallInteger) - cp = Column(SmallInteger) __table_args__ = ( UniqueConstraint( @@ -254,7 +253,6 @@ class Mystery(Base): sta_iv = Column(TINY_TYPE) move_1 = Column(SmallInteger) move_2 = Column(SmallInteger) - cp = Column(SmallInteger) __table_args__ = ( UniqueConstraint( @@ -356,8 +354,7 @@ def add_sighting(session, pokemon): def_iv=pokemon.get('individual_defense'), sta_iv=pokemon.get('individual_stamina'), move_1=pokemon.get('move_1'), - move_2=pokemon.get('move_2'), - cp=pokemon.get('cp') + move_2=pokemon.get('move_2') ) session.add(obj) SIGHTING_CACHE.add(pokemon) @@ -456,8 +453,7 @@ def add_mystery(session, pokemon): def_iv=pokemon.get('individual_defense'), sta_iv=pokemon.get('individual_stamina'), move_1=pokemon.get('move_1'), - move_2=pokemon.get('move_2'), - cp=pokemon.get('cp') + move_2=pokemon.get('move_2') ) session.add(obj) MYSTERY_CACHE.add(pokemon) diff --git a/monocle/worker.py b/monocle/worker.py index c0923af19..e5be2bf2e 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -1019,7 +1019,6 @@ async def encounter(self, pokemon, spawn_id): pokemon['individual_stamina'] = pdata.individual_stamina pokemon['height'] = pdata.height_m pokemon['weight'] = pdata.weight_kg - pokemon['cp'] = pdata.cp pokemon['gender'] = pdata.pokemon_display.gender except KeyError: self.log.error('Missing encounter response.') From ae997b86c3c5471d0042bff161a21d58ddc72fff Mon Sep 17 00:00:00 2001 From: Unknown Date: Mon, 26 Jun 2017 10:14:24 +0200 Subject: [PATCH 35/38] Resetting error codes --- monocle/worker.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/monocle/worker.py b/monocle/worker.py index 
e5be2bf2e..5432bad7f 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -157,7 +157,7 @@ async def login(self, reauth=False): for attempt in range(-1, conf.MAX_RETRIES): try: - self.error_code = '_' + self.error_code = '»' async with self.login_semaphore: self.error_code = 'LOGIN' await self.api.set_authentication( @@ -184,8 +184,8 @@ async def login(self, reauth=False): if err: raise err + self.error_code = '°' version = 6701 - self.error_code = '-' async with self.sim_semaphore: self.error_code = 'APP SIMULATION' if conf.APP_SIMULATION: @@ -628,7 +628,7 @@ async def bootstrap_visit(self, point): for _ in range(3): if await self.visit(point, bootstrap=True): return True - self.error_code = '?' + self.error_code = '∞' self.simulate_jitter(0.00005) return False @@ -734,7 +734,7 @@ async def visit_point(self, point, spawn_id, bootstrap, encounter_conf=conf.ENCOUNTER, notify_conf=conf.NOTIFY, more_points=conf.MORE_POINTS): self.handle.cancel() - self.error_code = '?' if bootstrap else '!' + self.error_code = '∞' if bootstrap else '!' 
self.log.info('Visiting {0[0]:.4f},{0[1]:.4f}', point) start = time() From 9d7f45f6621031fb4f5f1b6c464e5dcca1ca018c Mon Sep 17 00:00:00 2001 From: Unknown Date: Mon, 26 Jun 2017 10:25:11 +0200 Subject: [PATCH 36/38] Clean branch --- monocle/db_proc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monocle/db_proc.py b/monocle/db_proc.py index 0b812757f..9490e0756 100644 --- a/monocle/db_proc.py +++ b/monocle/db_proc.py @@ -26,7 +26,7 @@ def stop(self): self.queue.put({'type': False}) def add(self, obj): - self.queue.put(obj) + self.queue.put(obj) def lure_to_add(self, pokestop_id): session = db.Session() From bbcd3f4bb650a119011d0355d1d0f19fe1c213d4 Mon Sep 17 00:00:00 2001 From: Unknown Date: Mon, 26 Jun 2017 11:01:38 +0200 Subject: [PATCH 37/38] Clean branch --- monocle/db.py | 30 --------------------- monocle/db_proc.py | 8 ------ monocle/sanitized.py | 6 ++--- monocle/worker.py | 28 -------------------- scripts/convert_landmarks_example.py | 39 ---------------------------- 5 files changed, 2 insertions(+), 109 deletions(-) delete mode 100755 scripts/convert_landmarks_example.py diff --git a/monocle/db.py b/monocle/db.py index fb38e061d..f1d1eeb28 100644 --- a/monocle/db.py +++ b/monocle/db.py @@ -789,33 +789,3 @@ def get_all_spawn_coords(session, pokemon_id=None): if conf.REPORT_SINCE: points = points.filter(Sighting.expire_timestamp > SINCE_TIME) return points.all() - -def has_lure_to_add(session, pokestop_id): - query = session.execute(''' - SELECT - pokestops.id, - pokestops.lat, - pokestops.lon - FROM lureToAdd - INNER JOIN pokestops ON lureToAdd.pokestop_id = pokestops.id - WHERE pokestops.external_id = '{pokestop_id}' - '''.format( - pokestop_id=pokestop_id, - )) - log.warning('{} lure asked on {}.', query.rowcount, pokestop_id) - if query.rowcount == 0: - return False - else: - return True - -def del_lure_to_add(session, pokestop_id): - query = session.execute(''' - DELETE lureToAdd - FROM lureToAdd - INNER JOIN pokestops ON 
lureToAdd.pokestop_id = pokestops.id - WHERE pokestops.external_id = '{pokestop_id}' - '''.format( - pokestop_id=pokestop_id, - )) - session.commit() - log.warning('Deleting lure demand {}.', pokestop_id) diff --git a/monocle/db_proc.py b/monocle/db_proc.py index 9490e0756..772ad7d8f 100644 --- a/monocle/db_proc.py +++ b/monocle/db_proc.py @@ -28,14 +28,6 @@ def stop(self): def add(self, obj): self.queue.put(obj) - def lure_to_add(self, pokestop_id): - session = db.Session() - return db.has_lure_to_add(session, pokestop_id) - - def del_lure_to_add(self, pokestop_id): - session = db.Session() - db.del_lure_to_add(session, pokestop_id) - def run(self): session = db.Session() LOOP.call_soon_threadsafe(self.commit) diff --git a/monocle/sanitized.py b/monocle/sanitized.py index 7767859ea..05aff696e 100644 --- a/monocle/sanitized.py +++ b/monocle/sanitized.py @@ -126,8 +126,7 @@ 'TWITTER_SCREEN_NAME': str, 'TZ_OFFSET': Number, 'UVLOOP': bool, - 'WEBHOOKS': set_sequence, - 'LURE_ON_DEMAND': bool + 'WEBHOOKS': set_sequence } _defaults = { @@ -232,8 +231,7 @@ 'TWITTER_SCREEN_NAME': None, 'TZ_OFFSET': None, 'UVLOOP': True, - 'WEBHOOKS': None, - 'LURE_ON_DEMAND': False + 'WEBHOOKS': None } diff --git a/monocle/worker.py b/monocle/worker.py index 5432bad7f..2c43be427 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -820,8 +820,6 @@ async def visit_point(self, point, spawn_id, bootstrap, pokemon_seen += 1 if norm not in SIGHTING_CACHE: db_proc.add(norm) - elif conf.LURE_ON_DEMAND: - await self.add_lure_pokestop(fort) if (self.pokestops and self.bag_items < self.item_capacity and time() > self.next_spin @@ -900,32 +898,6 @@ def smart_throttle(self, requests=1): except (TypeError, KeyError): return False - async def add_lure_pokestop(self, pokestop): - self.error_code = '$' - pokestop_location = pokestop.latitude, pokestop.longitude - distance = get_distance(self.location, pokestop_location) - # permitted interaction distance - 4 (for some jitter leeway) - # estimation 
of spinning speed limit - if distance > 36 or self.speed > SPINNING_SPEED_LIMIT: - self.error_code = '!' - return False - - # randomize location up to ~1.5 meters - self.simulate_jitter(amount=0.00001) - session = SessionManager.get() - if db_proc.lure_to_add(pokestop.id): - db_proc.del_lure_to_add(pokestop.id) - request = self.api.create_request() - self.log.warning('Request add_fort_modifier ITEM_TROY_DISK {} {} {}', pokestop.id, pokestop_location[0], pokestop_location[1]) - request.add_fort_modifier( - # modifier_type = ITEM_TROY_DISK, - modifier_type = 501, - fort_id = pokestop.id, - player_latitude = self.location[0], - player_longitude = self.location[1], - ) - responses = await self.call(request, action=1.1) - async def spin_pokestop(self, pokestop): self.error_code = '$' pokestop_location = pokestop.latitude, pokestop.longitude diff --git a/scripts/convert_landmarks_example.py b/scripts/convert_landmarks_example.py deleted file mode 100755 index 39d55b49b..000000000 --- a/scripts/convert_landmarks_example.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python3 - - -class Landmarks: - args = 'name', 'shortname', 'points', 'query', 'hashtags', 'phrase', 'is_area', 'query_suffix' - - def __init__(self, query_suffix=None): - self.query_suffix = query_suffix - - self.landmarks = [] - - def add(self, *args, **kwargs): - dictionary = {self.args[num]: arg for num, arg in enumerate(args)} - dictionary.update(kwargs) - - self.landmarks.append(dictionary) - - def print_config(self): - print('Replace your old Landmarks config with the following:\n') - - if self.query_suffix: - print("QUERY_SUFFIX = '{}'".format(self.query_suffix)) - - print('LANDMARKS =', tuple(self.landmarks)) - - -### replace example below with your own old-style landmarks config ### -LANDMARKS = Landmarks(query_suffix='Salt Lake City') - -LANDMARKS.add('Rice Eccles Stadium', hashtags=['Utes']) -LANDMARKS.add('the Salt Lake Temple', hashtags=['TempleSquare']) -LANDMARKS.add('City Creek Center', 
points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags=['CityCreek']) -LANDMARKS.add('the State Capitol', query='Utah State Capitol Building') -LANDMARKS.add('the University of Utah', hashtags=['Utes'], phrase='at', is_area=True) -LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True) -### replace example above with your own old-style landmarks config ### - - -LANDMARKS.print_config() From 462bc0fc5519818673582d3a0254a877a5518601 Mon Sep 17 00:00:00 2001 From: Unknown Date: Mon, 26 Jun 2017 11:36:42 +0200 Subject: [PATCH 38/38] Adding () --- monocle/worker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monocle/worker.py b/monocle/worker.py index 2c43be427..6c9233e2f 100644 --- a/monocle/worker.py +++ b/monocle/worker.py @@ -786,10 +786,10 @@ async def visit_point(self, point, spawn_id, bootstrap, if (normalized not in SIGHTING_CACHE and normalized not in MYSTERY_CACHE): - if (encounter_conf == 'all' + if ((encounter_conf == 'all' or (encounter_conf == 'some' and normalized['pokemon_id'] in conf.ENCOUNTER_IDS)) - and self.player_level != None and self.player_level >= 30: + and self.player_level != None and self.player_level >= 30): try: await self.encounter(normalized, pokemon.spawn_point_id) except CancelledError: