diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..68ce95c
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,7 @@
+{
+  "cSpell.words": [
+    "autocomp",
+    "disnake",
+    "sqlalchemy"
+  ]
+}
\ No newline at end of file
diff --git a/Neat b/Neat
new file mode 100644
index 0000000..487f4d9
--- /dev/null
+++ b/Neat
@@ -0,0 +1,21 @@
+URL = 'https://www.google.com/'
+IMAGE_1 = 'https://cdn.discordapp.com/embed/avatars/1.png'
+IMAGE_2 = 'https://cdn.discordapp.com/embed/avatars/2.png'
+
+# embeds must have the same url
+# works with up to 4 embeds on desktop; might not work at all on mobile
+
+embed1 = discord.Embed(
+    title='TITLE 1',
+    description='DESCRIPTION 1',
+    url=URL  # <<< same url as other embeds
+)
+embed1.set_image(url=IMAGE_1)
+
+embed2 = discord.Embed(
+    title='TITLE 2',
+    description='DESCRIPTION 2',
+    url=URL  # <<< same url as other embeds
+)
+embed2.set_image(url=IMAGE_2)
+await channel.send(embeds=[embed1, embed2])
\ No newline at end of file
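The Neat snippet works because Discord groups embeds that share one url into a single gallery message. A minimal sketch of the same trick as a reusable helper — gallery_embeds is a hypothetical name, not part of this repo, and it assumes disnake's Embed mirrors the discord.py API used above:

import disnake

def gallery_embeds(title: str, image_urls: list[str], url: str) -> list[disnake.Embed]:
    # Only the first embed needs a title; the shared url is what groups them.
    embeds = []
    for i, image in enumerate(image_urls[:4]):  # Discord renders at most 4 grouped images
        embed = disnake.Embed(title=title if i == 0 else None, url=url)
        embed.set_image(url=image)
        embeds.append(embed)
    return embeds

# usage: await channel.send(embeds=gallery_embeds('TITLE', [IMAGE_1, IMAGE_2], URL))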
diff --git a/__pycache__/calculate_time.cpython-310.pyc b/__pycache__/calculate_time.cpython-310.pyc
new file mode 100644
index 0000000..a33727d
Binary files /dev/null and b/__pycache__/calculate_time.cpython-310.pyc differ
diff --git a/__pycache__/check_player.cpython-310.pyc b/__pycache__/check_player.cpython-310.pyc
new file mode 100644
index 0000000..6e6ac33
Binary files /dev/null and b/__pycache__/check_player.cpython-310.pyc differ
diff --git a/__pycache__/db.cpython-310.pyc b/__pycache__/db.cpython-310.pyc
new file mode 100644
index 0000000..f42d50d
Binary files /dev/null and b/__pycache__/db.cpython-310.pyc differ
diff --git a/__pycache__/main.cpython-310.pyc b/__pycache__/main.cpython-310.pyc
new file mode 100644
index 0000000..809de3c
Binary files /dev/null and b/__pycache__/main.cpython-310.pyc differ
diff --git a/__pycache__/sb_emojis.cpython-310.pyc b/__pycache__/sb_emojis.cpython-310.pyc
new file mode 100644
index 0000000..ebc46b2
Binary files /dev/null and b/__pycache__/sb_emojis.cpython-310.pyc differ
diff --git a/__pycache__/sb_utils.cpython-310.pyc b/__pycache__/sb_utils.cpython-310.pyc
new file mode 100644
index 0000000..1bbce3e
Binary files /dev/null and b/__pycache__/sb_utils.cpython-310.pyc differ
diff --git a/__pycache__/setup.cpython-310.pyc b/__pycache__/setup.cpython-310.pyc
new file mode 100644
index 0000000..b609331
Binary files /dev/null and b/__pycache__/setup.cpython-310.pyc differ
diff --git a/castle_designs/21/b330bd77-44cc-4afb-89ce-598d4182d926.png b/castle_designs/21/b330bd77-44cc-4afb-89ce-598d4182d926.png
new file mode 100644
index 0000000..91afa12
Binary files /dev/null and b/castle_designs/21/b330bd77-44cc-4afb-89ce-598d4182d926.png differ
diff --git a/cogs/__pycache__/_template_cog.cpython-310.pyc b/cogs/__pycache__/_template_cog.cpython-310.pyc
new file mode 100644
index 0000000..6102b37
Binary files /dev/null and b/cogs/__pycache__/_template_cog.cpython-310.pyc differ
diff --git a/cogs/__pycache__/_vm_tracker.cpython-310.pyc b/cogs/__pycache__/_vm_tracker.cpython-310.pyc
new file mode 100644
index 0000000..a72d42c
Binary files /dev/null and b/cogs/__pycache__/_vm_tracker.cpython-310.pyc differ
diff --git a/cogs/__pycache__/check_player.cpython-310.pyc b/cogs/__pycache__/check_player.cpython-310.pyc
new file mode 100644
index 0000000..080b4a6
Binary files /dev/null and b/cogs/__pycache__/check_player.cpython-310.pyc differ
diff --git a/cogs/__pycache__/relationship_manager.cpython-310.pyc b/cogs/__pycache__/relationship_manager.cpython-310.pyc
new file mode 100644
index 0000000..55262a2
Binary files /dev/null and b/cogs/__pycache__/relationship_manager.cpython-310.pyc differ
diff --git a/cogs/__pycache__/setup.cpython-310.pyc b/cogs/__pycache__/setup.cpython-310.pyc
new file mode 100644
index 0000000..1622ce0
Binary files /dev/null and b/cogs/__pycache__/setup.cpython-310.pyc differ
diff --git a/cogs/__pycache__/shk_info_tasks.cpython-310.pyc b/cogs/__pycache__/shk_info_tasks.cpython-310.pyc
new file mode 100644
index 0000000..9c8f308
Binary files /dev/null and b/cogs/__pycache__/shk_info_tasks.cpython-310.pyc differ
diff --git a/cogs/__pycache__/vm_tracker.cpython-310.pyc b/cogs/__pycache__/vm_tracker.cpython-310.pyc
new file mode 100644
index 0000000..130287f
Binary files /dev/null and b/cogs/__pycache__/vm_tracker.cpython-310.pyc differ
diff --git a/cogs/relationship_manager.py b/cogs/relationship_manager.py
new file mode 100644
index 0000000..b54d725
--- /dev/null
+++ b/cogs/relationship_manager.py
@@ -0,0 +1,75 @@
+from disnake.ext import commands
+import disnake
+from main import StronkBot
+from loguru import logger
+import httpx
+from thefuzz import process
+from datetime import datetime
+
+
+class Relationship_Manager(commands.Cog):
+    def __init__(self,bot:StronkBot):
+        self.bot = bot
+        self.houses = ["House 1 - The Heavenly Ones","House 2 - High Castle","House 3 - The Dragons","House 4 - The Farmers","House 5 - The Navigators","House 6 - The Free Folk","House 7 - The Royals","House 8 - The Roses","House 9 - The Rams","House 10 - Fighters","House 11 - Heroes","House 12 - Stags","House 13 - Oak","House 14 - Beasts","House 15 - The Pure","House 16 - Lionheart","House 17 - The Insane","House 18 - Sinners","House 19 - The Double-Eagles","House 20 - Maidens",]
+
+    @commands.slash_command()
+    async def set_relationship(self,inter:disnake.ApplicationCommandInteraction,
+        target_type:str = commands.Param(choices=["House", "Player"]),
+        relation:str = commands.Param(choices=["Faction", "Allied", "Neutral", "Peace", "Enemy", "Rogue"]),
+        world:str = commands.Param(description="Which world does this relation apply to? Leave blank to affect all worlds", default = None),
+        target:str = commands.Param(description="Type the name of the target of this relation change"),
+        comment:str = commands.Param(default="",description="If you want you can add some info about this relationship for clarity. Might not be used anywhere")):
+        #Validate target name
+        if target_type == "Player":
+            async with httpx.AsyncClient() as client:
+                response = await client.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={target}')
+            if target not in response.json(): return await inter.send(ephemeral=True, content=f"You submitted {target}; this name does not seem valid. Make sure the name has the right capitalization, or the player may be banned")
+            await self.bot.database.players.update_one({"in_game_name": target},{"$set": {"in_game_name": target}},upsert=True)
+        if target_type == "House":
+            if target not in self.houses: return await inter.send(ephemeral=True,content=f"You submitted {target}; this name does not seem valid. Make sure to pick from the list")
+            target = target.partition(" - ")[0]
+
+        document = {
+            "guild_id": inter.guild_id,
+            "world": world,
+            "target": target,
+            "type": target_type,
+            "state": relation,
+            "comment": comment,
+            "author": {
+                "discord_id" : inter.author.id,
+                "discord_name" : inter.author.display_name,
+                "date": datetime.now()
+            }
+        }
+
+        #Upsert the correct document to the relationship collection
+        await self.bot.database.relationships.update_one({"guild_id": inter.guild_id, "type": target_type, "target": target}, {"$set": document},upsert=True)
+        await inter.send(ephemeral=True,content=f"Done! Set {target} as {relation}!\nNote that this does not update adverts; any changes will appear during their scheduled updates.")
+
+    @set_relationship.autocomplete("target")
+    async def autocomp_input(self, inter:disnake.ApplicationCommandInteraction,user_input:str):
+        if len(user_input) == 0: return []
+        target_type = inter.filled_options.get("target_type")
+        if target_type is None: return ["Please set a target type first"]
+        if target_type == "Player":
+            async with httpx.AsyncClient() as client:
+                response = await client.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={user_input}')
+            return response.json()
+        if target_type == "House":
+            return [option[0] for option in process.extract(user_input,self.houses,limit=4)]
+
+    @set_relationship.autocomplete("world")
+    async def autocomp_world(self, inter:disnake.ApplicationCommandInteraction,user_input:str):
+        server_document = await self.bot.database.servers.find_one({"_id":inter.guild_id})
+        if server_document is None: return []
+        return list(server_document.get("worlds", {}).keys())[:25]
+
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(Relationship_Manager(bot))
\ No newline at end of file
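set_relationship keeps exactly one document per (guild, target type, target); the update_one call with upsert=True is what makes the command idempotent. A standalone sketch of that pattern, assuming the database handle is Motor (async MongoDB) as the awaited calls imply — upsert_relationship is an illustrative name:

async def upsert_relationship(database, guild_id: int, target_type: str, target: str, document: dict) -> None:
    # $set overwrites any previous state; upsert=True creates the doc on first use.
    await database.relationships.update_one(
        {"guild_id": guild_id, "type": target_type, "target": target},
        {"$set": document},
        upsert=True,
    )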
diff --git a/cogs/setup.py b/cogs/setup.py
index 5503780..d3ac07c 100644
--- a/cogs/setup.py
+++ b/cogs/setup.py
@@ -1,13 +1,66 @@
 from disnake.ext import commands
+from PIL import Image
 import disnake
+from main import StronkBot
+from loguru import logger
 
 class Setup(commands.Cog):
-    typelist = ["liegelord","vm_tracker","diplomacy"]
-    def __init__(self,bot) -> None:
+    def __init__(self,bot:StronkBot):
         self.bot = bot
+        self.worlds = []
+        self.advert_types = []
 
-    @commands.slash_command(description='Define an advert channel',default_member_permissions=disnake.Permissions(manage_guild=True))
-    async def create_advert(self,
+    @commands.slash_command(description="Assign a world to this server",default_member_permissions=disnake.Permissions(manage_guild=True))
+    async def set_server_world(self,
         inter:disnake.ApplicationCommandInteraction,
-        advert_type:str = commands.Param(description='What type=',choices=typelist),
-        target: disnake.abc.GuildChannel = commands.Param(description='Channel override, optional')):
+        world:str,
+        api_key:str = None):
+        if world not in self.worlds:
+            await inter.response.send_message(content="Not found, use suggestions please",ephemeral=True)
+            return
+
+        world_document = await self.bot.database.worlds.find_one({"world_name": world})
+        if not world_document:
+            return await inter.response.send_message(ephemeral=True,content="Can't find the world, contact Strix please")
+        server_document = await self.bot.database.servers.find_one({"_id": inter.guild_id})
+        if not server_document:
+            new_document = {"_id": inter.guild_id,"server_name": inter.guild.name,"worlds":{world_document["world_name"]:api_key}}
+            result = await self.bot.database.servers.insert_one(new_document)
+            return await inter.response.send_message(ephemeral=True, content=f'Great! Server registered and added {world_document["world_name"]} to your server')
+        world_list = server_document.get("worlds")
+        if world_list and world_document["world_name"] in world_list:
+            return await inter.response.send_message(ephemeral=True, content="This world is already registered on this server!")
+        await self.bot.database.servers.find_one_and_update({"_id": inter.guild_id},{"$set":{"worlds.{}".format(world_document["world_name"]):api_key}},upsert=True)
+        await inter.response.send_message(f"Done, added {world_document['world_name']}")
+
+    @set_server_world.autocomplete("world")
+    async def autocomp_worlds(self, inter:disnake.ApplicationCommandInteraction, user_input:str):
+        if len(self.worlds) == 0:
+            cursor = self.bot.database.worlds.find()
+            world_docs = await cursor.to_list(length=None)
+            self.worlds = [world["world_name"] for world in world_docs if not world["ended"]]
+        return [world for world in self.worlds if user_input.lower() in world.lower()][:25]
+
+    @commands.slash_command(description="Define one of your channels as an advert channel")
+    async def set_advert_channel(self, inter:disnake.ApplicationCommandInteraction, ad_type:str, channel:disnake.TextChannel):
+        if ad_type not in self.advert_types:
+            await inter.response.send_message(content="Not found, use suggestions please",ephemeral=True)
+            return
+
+        result = await self.bot.database.adverts.update_one({"guild_id": inter.guild_id}, {"$set":{"advert_type": ad_type, "channel_id": channel.id}}, upsert=True)
+        await inter.send(content=f"Ok, registered {channel.name} as a {ad_type}. The advert should appear shortly.")
+        #TODO: Trigger individual refresh of this advert.
+
+    @set_advert_channel.autocomplete("ad_type")
+    async def autocomp_ad_types(self, inter:disnake.ApplicationCommandInteraction, user_input:str):
+        if len(self.advert_types) == 0:
+            results = await self.bot.database.config.find_one({"name":"advert_types"})
+            self.advert_types = results.get("advert_types")
+        return [ad_type for ad_type in self.advert_types if user_input.lower() in ad_type.lower()][:25]
+
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(Setup(bot))
\ No newline at end of file
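Both autocompletes above share one shape: fill a cached list on first call, then substring-filter and cap at 25, which is Discord's per-response choice limit. A hedged generic version of that pattern (cached_autocomplete and loader are illustrative names, not in the repo):

async def cached_autocomplete(cache: list[str], loader, user_input: str) -> list[str]:
    if not cache:
        cache.extend(await loader())  # populate the shared cache once, on first use
    return [entry for entry in cache if user_input.lower() in entry.lower()][:25]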
diff --git a/cogs/shk_info_tasks.py b/cogs/shk_info_tasks.py
new file mode 100644
index 0000000..4b7ee8b
--- /dev/null
+++ b/cogs/shk_info_tasks.py
@@ -0,0 +1,65 @@
+import aiohttp
+import disnake
+from disnake.ext import commands, tasks
+from loguru import logger
+import httpx
+from datetime import datetime
+
+
+class Shk_tasks(commands.Cog):
+    def __init__(self,bot):
+        self.bot = bot
+        self.getHouseData.start()
+
+    def cog_unload(self):
+        logger.info(f"{__name__} Unloaded")
+        self.getHouseData.cancel()
+
+    @tasks.loop(hours=2)
+    async def getHouseData(self) -> None:
+        #Collect all worlds we have access to.
+        await self.bot.wait_until_ready()
+        logger.info("Starting House Data Update")
+
+        filter_query = {"worlds": {"$exists": True}}
+        documents = await self.bot.database.servers.find(filter_query).to_list(None)
+
+        # Combine dictionaries into a collated list of worlds
+        collated_worlds = {}
+        for doc in documents:
+            worlds = doc.get("worlds")
+            collated_worlds.update(worlds)
+        upsert_operations = []
+        world:str
+        try:
+            async with aiohttp.ClientSession() as session:
+                for world,key in collated_worlds.items():
+                    if key is None: continue
+                    logger.info(f"Updating house info for {world}")
+                    for house in range(1,21):
+                        try:
+                            async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+                                for user in await houseActivity.json():
+                                    filter_query = {
+                                        "in_game_name": user["Username"],
+                                        "world": world,
+                                        "house": house
+                                    }
+                                    update_query = {
+                                        "$set": {
+                                            "timestamp": datetime.now()
+                                        }
+                                    }
+                                    await self.bot.database.players.update_one({"in_game_name": user["Username"]},{"$set": {"in_game_name": user["Username"]}},upsert=True)
+                                    await self.bot.database.house_history.update_one(filter_query, update_query, upsert=True)
+                        except aiohttp.ServerTimeoutError:
+                            logger.exception(f"Timeout when trying to fetch data for World {world} and house {house}")
+                    logger.info(f"Finished {world} update")
+        except Exception:
+            logger.exception("Scan failed due to exception")
+        logger.info("Finished House Data update")
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(Shk_tasks(bot))
\ No newline at end of file
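getHouseData passes timeout=20 to every session.get; a session-wide aiohttp.ClientTimeout expresses the same policy once. A small sketch under that assumption (fetch_house and activity_url are illustrative names):

import aiohttp

async def fetch_house(session: aiohttp.ClientSession, url: str) -> list:
    async with session.get(url) as resp:
        resp.raise_for_status()  # fail loudly instead of parsing an error page
        return await resp.json()

# usage sketch:
# async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=20)) as session:
#     users = await fetch_house(session, activity_url)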
diff --git a/cogs/vm_tracker.py b/cogs/vm_tracker.py
new file mode 100644
index 0000000..e1633ab
--- /dev/null
+++ b/cogs/vm_tracker.py
@@ -0,0 +1,175 @@
+from disnake.ext import commands, tasks
+import disnake
+from main import StronkBot
+from loguru import logger
+from datetime import datetime, timedelta
+from embed_factory import vm_advert
+import httpx
+from thefuzz import process
+
+
+class Vm_tracker(commands.Cog):
+    def __init__(self,bot:StronkBot):
+        self.bot = bot
+        self.update_vm_embeds.start()
+
+    def cog_unload(self):
+        self.update_vm_embeds.cancel()
+
+    @commands.slash_command(description="Report someone going in VM")
+    async def report_vm(self,inter:disnake.ApplicationCommandInteraction, in_game_name:str):
+        logger.info(f"{inter.application_command.name} used by {inter.author} using {inter.filled_options}")
+        await inter.response.defer(ephemeral=True)
+        async with httpx.AsyncClient() as client:
+            response = await client.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={in_game_name}')
+        if in_game_name not in response.json(): return await inter.send(content=f"You submitted {in_game_name}; this name does not seem valid. Make sure the name has the right capitalization, or the player may be banned")
+
+        #See if this is a duplicate
+        check = await self.bot.database.vm_entries.find_one({"in_game_name": in_game_name,"guild_id": inter.guild_id, "finished": {"$exists" : False}})
+        if check is not None:
+            return await inter.send(content="Good news, this is already reported! :D")
+        await self.bot.database.vm_entries.insert_one(
+            {"guild_id": inter.guild_id, "in_game_name": in_game_name, "added": datetime.now()}
+        )
+        advert = await self.bot.database.adverts.find_one({"advert_type":"vm_tracker", "guild_id": inter.guild_id})
+        if advert is not None:
+            await self.update_adverts([advert])
+        await inter.send(content=f"Thank you! {in_game_name} has been registered")
+
+    @commands.slash_command(description="Edit a VM entry on this server")
+    async def edit_vm(self,inter:disnake.ApplicationCommandInteraction, in_game_name:str, action:str = commands.Param(choices=["Remove", "Adjust Start Time"], description="If you choose adjust time add the optional 'Value' argument"), value:int = commands.Param(default= 0 ,description="In hours, how much do you want to adjust the start time. Negative numbers go forward in time")):
+        logger.info(f"{inter.application_command.name} used by {inter.author} using {inter.filled_options}")
+        # Get the list of players this server is allowed to edit.
+        filter = {
+            "added": {
+                "$gte": datetime(datetime.now().year - 1, 12, 15),
+                "$lte": datetime(datetime.now().year + 1, 1, 15)
+            },
+            "guild_id": inter.guild_id,
+            "finished": {"$exists" : False}
+        }
+        vm_entries = await self.bot.database.vm_entries.find(filter, {"_id": 0, "in_game_name": 1} ).to_list(length=None)
+        eligible_names = [entry["in_game_name"] for entry in vm_entries]
+        filter.update({"in_game_name": in_game_name})
+        if in_game_name not in eligible_names: return await inter.send(ephemeral=True, content=f"You submitted {in_game_name}; this name does not seem valid. You can only edit entries submitted by your server")
+        if action == "Remove":
+            await self.bot.database.vm_entries.update_one(filter, {"$set": {"finished": datetime.now()}})
+        elif action == "Adjust Start Time":
+            await self.bot.database.vm_entries.update_one(filter, [{"$set": {"added": { "$add": ["$added", -value*3600000]}}}])  # hours -> milliseconds
+
+        advert = await self.bot.database.adverts.find_one({"advert_type":"vm_tracker", "guild_id": inter.guild_id})
+        if advert is not None:
+            await self.update_adverts([advert])
+        await inter.response.send_message(ephemeral=True,content=f"Thank you! {in_game_name} has been edited")
+
+    @edit_vm.autocomplete("in_game_name")
+    async def autocomp_input(self, inter:disnake.ApplicationCommandInteraction,user_input:str):
+        if len(user_input) == 0: return []
+        filter = {
+            "added": {
+                "$gte": datetime(datetime.now().year - 1, 12, 15),
+                "$lte": datetime(datetime.now().year + 1, 1, 15)
+            },
+            "guild_id": inter.guild_id,
+            "finished": {"$exists" : False}
+        }
+        vm_entries = await self.bot.database.vm_entries.find(filter, {"_id": 0, "in_game_name": 1} ).to_list(length=None)
+        eligible_names = [entry["in_game_name"] for entry in vm_entries]
+        return [option[0] for option in process.extract(user_input,set(eligible_names),limit=4)]
+
+    @tasks.loop(hours=1)
+    async def update_vm_embeds(self) -> None:
+        await self.bot.wait_until_ready()
+        logger.info(f"---> Updating VM adverts")
+
+        #Check if any entries have expired
+        result = await self.bot.database.vm_entries.update_many({"added": {"$lt": datetime.now()-timedelta(days=15)}, "finished" : { "$exists" : False }},{"$set":{"finished": datetime.now()}})
+        adverts = await self.bot.database.adverts.find({"advert_type": "vm_tracker"}).to_list(length=None)
+
+        #TODO Enable some way to switch which world house colors should be pulled from, as servers might have more than one world listed.
+
+        await self.update_adverts(adverts)
+
+    async def update_adverts(self,adverts:list):
+        for advert in adverts:
+            #Make sure the guild is still present
+            target_guild = self.bot.get_guild(advert["guild_id"])
+            if not target_guild:
+                logger.error(f"Tried updating VM advert in {advert['guild_id']} but it's not present in the bot")
+                continue
+                #TODO Note infraction for later cleanup if this repeats.
+            target_channel = target_guild.get_channel(advert["channel_id"])
+            if not target_channel:
+                logger.error(f"Tried updating VM advert in {target_guild.name} but the channel is not present in the guild")
+                continue
+                #TODO Note infraction, warn the server owner. Repeated failures will result in advert removal
+
+            #Collect what world(s) the guild is looking at
+            current_server_document = await self.bot.database.servers.find_one({"_id": advert["guild_id"]})
+            world_list:dict = current_server_document.get("worlds", {})
+
+            vm_entries = await self.collect_server_vms(guild_id=advert["guild_id"],all=True)
+            vm_entries = sorted(vm_entries, key=lambda x: x["added"])
+            #Get the relevant house affiliations if applicable
+            house_data = None
+            house_relations = None
+            relations = {}
+            if len(world_list) > 0:
+                house_data = await self.bot.database.house_history.find({"timestamp":{"$gte":datetime.now()- timedelta(hours=2)}, "in_game_name": {"$in":[entry["in_game_name"] for entry in vm_entries]}, "world":next(iter(world_list))}).to_list(length=None)
+                house_data = {doc["in_game_name"]: doc for doc in house_data}
+                house_relations = await self.bot.database.relationships.find({"guild_id": advert["guild_id"], "type": "House", "world": {"$in": [next(iter(world_list)),None]}}).to_list(length=None)
+                relations = {doc["target"]: doc["state"] for doc in house_relations}
+
+                player_relations = await self.bot.database.relationships.find({"guild_id": advert["guild_id"], "type": "Player", "world": {"$in": [next(iter(world_list)),None]}}).to_list(length=None)
+                relations.update({doc["target"]: doc["state"] for doc in player_relations})
+
+            embed = vm_advert.generate_embed(bot = self.bot, guild_id = advert["guild_id"],vm_entries=vm_entries,house_data=house_data,relations=relations)
+            target_message_id = advert.get("message_id", None)
+            if target_message_id is None:
+                sent_message = await target_channel.send(embed=embed)
+                #Add new message to advert for future updates
+                await self.bot.database.adverts.update_one({"guild_id":advert["guild_id"],"channel_id":advert["channel_id"]},{"$set": {"message_id":sent_message.id}})
+                continue
+            else:
+                target_message = await target_channel.fetch_message(target_message_id)
+                await target_message.edit(embed=embed,components=None)
+
+    async def collect_server_vms(self,guild_id:int, all=False):
+        from_date = datetime(datetime.now().year - 1, 12, 15)
+        to_date = datetime(datetime.now().year + 1, 1, 15)
+
+        # Build the filter for the query
+        filter_query = {
+            "added": {
+                "$gte": from_date,
+                "$lte": to_date
+            },
+            "guild_id": guild_id,
+        }
+        if not all:
+            filter_query.update({"finished": {"$exists" : False}})
+        return await self.bot.database.vm_entries.find(filter_query).to_list(length=None)
+
+    @commands.Cog.listener()
+    async def on_button_click(self, inter:disnake.MessageInteraction):
+        if "vm_tracker" not in inter.component.custom_id: return
+        logger.info(f"{inter.component.custom_id} used by {inter.author}")
+        await inter.response.defer(with_message=False)
+
+        await self.bot.database.adverts.update_one({"advert_type":"vm_tracker", "guild_id": inter.guild_id}, [ { "$set": {"private": {"$not": "$private" } } } ] )
+
+        advert = await self.bot.database.adverts.find_one({"advert_type":"vm_tracker", "guild_id": inter.guild_id})
+        if advert is not None:
+            await self.update_adverts([advert])
+
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(Vm_tracker(bot))
\ No newline at end of file
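on_button_click above flips the advert's private flag without a read-modify-write: passing a list as the update makes MongoDB treat it as an aggregation pipeline, which may reference the current field value. The same idiom in isolation, assuming a Motor collection and MongoDB 4.2+ (toggle_private is an illustrative name):

async def toggle_private(adverts, guild_id: int) -> None:
    # A pipeline update ([...]) may reference "$private"; a plain {"$set": ...} cannot.
    await adverts.update_one(
        {"advert_type": "vm_tracker", "guild_id": guild_id},
        [{"$set": {"private": {"$not": "$private"}}}],
    )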
diff --git a/cogs_dev/check_player.py b/cogs_dev/check_player.py
new file mode 100644
index 0000000..2d1a778
--- /dev/null
+++ b/cogs_dev/check_player.py
@@ -0,0 +1,89 @@
+from disnake.ext import commands, tasks
+import disnake
+import db
+import httpx
+from datetime import datetime
+from sqlalchemy import func
+from loguru import logger
+import sb_utils
+import embed_factory.check_player
+from main import StronkBot
+
+class Check_Player(commands.Cog):
+    def __init__(self,bot):
+        self.bot:StronkBot = bot
+
+    @commands.slash_command(description="Look up information about a player")
+    async def check_player(self, inter:disnake.ApplicationCommandInteraction,player:str):
+        logger.info(f"{inter.application_command.name} used by {inter.author} using {inter.filled_options}")
+        #Get server specific data
+        server_document = await self.bot.database.servers.find_one({"_id" : inter.guild_id})
+        if server_document is None: return await inter.send(ephemeral=True,content="Missing server setup, have you done the setup process?")
+        server_worlds = list(server_document.get("worlds", {}).keys())
+        if not server_worlds: return await inter.send(ephemeral=True,content="No worlds registered for this server, have you done the setup process?")
+        await inter.response.defer()
+        async with httpx.AsyncClient() as client:
+            #Check if player is banned
+            ban_response = await client.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={player}')
+            #Collect world activity data
+            activity_response = await client.get(f'https://shk.azure-api.net/shkinfo/v1/UserActivity?world={server_worlds[0]}&username={player}&Key={server_document["worlds"][server_worlds[0]]}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d')
+        if activity_response.text == '01/Jan/0001 00:00:00':
+            activity_date = None
+        else:
+            activity_date = datetime.strptime(activity_response.text,'%d/%b/%Y %H:%M:%S')
+        shk_banned_data = ban_response.json()
+
+        #Collect info about the player
+        if len(server_worlds) > 0:
+            #Collect house history
+            house_data = await self.bot.database.house_history.find({"in_game_name": player, "world": server_worlds[0]}).to_list(length=None)
+            #Collect Relationships with the server
+            target_list = [f"House {entry['house']}" for entry in house_data]
+            target_list.append(player)
+            relationships = await self.bot.database.relationships.find({ "$and" : [ { "$or" : [ { "world" : server_worlds[0] }, { "world" : { "$not" : { "$exists" : True } } }, { "world" : None } ] }, { "guild_id" : inter.guild_id }, { "target" : { "$in" : target_list } } ] }).to_list(length=None)
+            #Collect Intel data
+            intel = await self.bot.database.intel_screenshots.find({"in_game_name": player, "added_by.discord_guild_id": inter.guild_id}).to_list(length=None)
+            ##Collect VM data
+            from_date = datetime(datetime.now().year - 1, 12, 15)
+            to_date = datetime(datetime.now().year + 1, 1, 15)
+            filter_query = {
+                "added": {
+                    "$gte": from_date,
+                    "$lte": to_date
+                },
+                "guild_id": inter.guild_id,
+                "in_game_name" : player
+            }
+            vm_entries = await self.bot.database.vm_entries.find(filter_query).to_list(length=None)
+        #TODO: Build the overview embed from the collected data and send it
+
+    @check_player.autocomplete("player")
+    async def autocomp_input(self, inter:disnake.ApplicationCommandInteraction,user_input:str):
+        if len(user_input) == 0: return []
+        async with httpx.AsyncClient() as client:
+            response = await client.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={user_input}')
+        return response.json()
+
+    @commands.Cog.listener()
+    async def on_message_interaction(self,inter:disnake.MessageInteraction):
+        pass
+
+# States:
+# 0 - Overview
+# 1 - Castle Designs
+# 2 - House History
+# 3 - Bot Usage Stats
+def generate_overview_embed_and_components(id:int):
+    pass
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(Check_Player(bot))
\ No newline at end of file
diff --git a/sbsheriff/Lib/site-packages/disnake-2.5.2.dist-info/REQUESTED b/cogs_dev/identify.py
similarity index 100%
rename from sbsheriff/Lib/site-packages/disnake-2.5.2.dist-info/REQUESTED
rename to cogs_dev/identify.py
diff --git a/cogs_dev/template_cog.py b/cogs_dev/template_cog.py
new file mode 100644
index 0000000..dd1597f
--- /dev/null
+++ b/cogs_dev/template_cog.py
@@ -0,0 +1,18 @@
+from disnake.ext import commands
+import disnake
+from main import StronkBot
+from loguru import logger
+
+
+class Template_Cog(commands.Cog):
+    def __init__(self,bot:StronkBot):
+        self.bot = bot
+
+    def cog_unload(self):
+        logger.info(f"{__name__} Unloaded")
+
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(Template_Cog(bot))
\ No newline at end of file
diff --git a/cogs_dev/user_manager.py b/cogs_dev/user_manager.py
new file mode 100644
index 0000000..2ad1c95
--- /dev/null
+++ b/cogs_dev/user_manager.py
@@ -0,0 +1,11 @@
+from disnake.ext import commands
+import disnake
+import db
+
+class User_Manager(commands.Cog):
+    @commands.Cog.listener() #Welcome Screen Listener
+    async def on_message_interaction(self,inter:disnake.MessageInteraction):
+        if not inter.component.custom_id.startswith("advert_welcome_"):
+            return
+        if inter.component.custom_id.partition("advert_welcome_")[2] == "social":
+            pass
diff --git a/cogs_old/__pycache__/ban_checker.cpython-310.pyc b/cogs_old/__pycache__/ban_checker.cpython-310.pyc
new file mode 100644
index 0000000..bb24575
Binary files /dev/null and b/cogs_old/__pycache__/ban_checker.cpython-310.pyc differ
diff --git a/cogs_old/__pycache__/calculate_time.cpython-310.pyc b/cogs_old/__pycache__/calculate_time.cpython-310.pyc
new file mode 100644
index 0000000..bfe562e
Binary files /dev/null and b/cogs_old/__pycache__/calculate_time.cpython-310.pyc differ
diff --git a/cogs_old/__pycache__/check_house.cpython-310.pyc b/cogs_old/__pycache__/check_house.cpython-310.pyc
new file mode 100644
index 0000000..5188405
Binary files /dev/null and b/cogs_old/__pycache__/check_house.cpython-310.pyc differ
diff --git a/cogs_old/__pycache__/check_player.cpython-310.pyc b/cogs_old/__pycache__/check_player.cpython-310.pyc
new file mode 100644
index 0000000..ed7bf56
Binary files /dev/null and b/cogs_old/__pycache__/check_player.cpython-310.pyc differ
diff --git a/cogs_old/__pycache__/parish_intel.cpython-310.pyc b/cogs_old/__pycache__/parish_intel.cpython-310.pyc
new file mode 100644
index 0000000..00661ed
Binary files /dev/null and b/cogs_old/__pycache__/parish_intel.cpython-310.pyc differ
diff --git a/cogs_old/__pycache__/player_intel.cpython-310.pyc b/cogs_old/__pycache__/player_intel.cpython-310.pyc
new file mode 100644
index 0000000..8ca6421
Binary files /dev/null and b/cogs_old/__pycache__/player_intel.cpython-310.pyc differ
diff --git a/cogs_old/__pycache__/setup.cpython-310.pyc b/cogs_old/__pycache__/setup.cpython-310.pyc
new file mode 100644
index 0000000..134f374
Binary files /dev/null and b/cogs_old/__pycache__/setup.cpython-310.pyc differ
diff --git a/cogs_old/__pycache__/shk_info_tasks.cpython-310.pyc b/cogs_old/__pycache__/shk_info_tasks.cpython-310.pyc
new file mode 100644
index 0000000..64193d7
Binary files /dev/null and b/cogs_old/__pycache__/shk_info_tasks.cpython-310.pyc differ
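cogs_dev/user_manager.py above routes component interactions by custom_id prefix; a hedged helper form of that parse (parse_custom_id is an illustrative name, not in the repo):

def parse_custom_id(custom_id: str, prefix: str = "advert_welcome_") -> str | None:
    # Returns the action suffix ("social", ...) or None when the prefix does not match.
    if not custom_id.startswith(prefix):
        return None
    return custom_id.partition(prefix)[2]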
diff --git a/cogs_old/ban_checker.py b/cogs_old/ban_checker.py
new file mode 100644
index 0000000..1195400
--- /dev/null
+++ b/cogs_old/ban_checker.py
@@ -0,0 +1,22 @@
+from disnake.ext import commands
+import disnake
+import db
+import httpx
+from datetime import datetime
+from loguru import logger
+import sb_utils
+import sb_emojis
+
+class PTest(commands.Cog):
+    def __init__(self,bot):
+        self.bot = bot
+
+    @commands.slash_command()
+    async def test(self,inter: disnake.ApplicationCommandInteraction):
+        user = await inter.bot.fetch_user(inter.author.id)
+        await inter.send(user.banner)
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(PTest(bot))
\ No newline at end of file
diff --git a/cogs_old/calculate_time.py b/cogs_old/calculate_time.py
new file mode 100644
index 0000000..211aab3
--- /dev/null
+++ b/cogs_old/calculate_time.py
@@ -0,0 +1,195 @@
+from disnake.ext import commands
+import disnake
+import db
+import requests
+import re
+from datetime import datetime, timedelta
+from loguru import logger
+
+class Calculate_Time(commands.Cog):
+    def __init__(self,bot):
+        self.bot = bot
+
+    @commands.slash_command(description="Calculate your times like a pro")
+    async def calculate_time(self,inter:disnake.ApplicationCommandInteraction):
+        db.log_interaction(inter,f"Issued calculate_time")
+        logger.info(f"{inter.author.display_name} Issued calculate time command")
+        await inter.response.send_modal(timesModal())
+
+    @commands.Cog.listener()
+    async def on_message_interaction(self,inter:disnake.MessageInteraction):
+        if 'calc_time' in inter.component.custom_id:
+            db.log_interaction(inter,f"Interacted with {inter.component.custom_id} on {inter.message.id}")
+            match(inter.component.custom_id):
+                case 'calc_time_delete':
+                    await inter.response.defer()
+                    await inter.message.delete()
+                case 'calc_time_add':
+                    await inter.response.send_modal(timesModal(inter=inter))
+                case 'calc_time_new':
+                    await inter.response.send_modal(timesModal())
+                case 'calc_time_show_all':
+                    await inter.response.defer()
+                    await update_message(inter=inter,display_all_multipliers=True)
+                case 'calc_time_multi':
+                    await inter.response.defer()
+                    with db.get_session()() as session:
+                        time_data_list = session.query(db.Time_Data).filter(db.Time_Data.message_id == inter.message.id).all()
+                        logger.debug(f"{len(time_data_list)} time rows on message {inter.message.id}")
+                        for t in time_data_list:
+                            t.multiplier = int(inter.values[0][1])
+                        session.commit()
+                    await update_message(inter=inter,time_data_list=time_data_list)
+
+
+def calc_times_from_modal(time_string,multi:int):
+    #Split times
+    timeList = re.split(',| ',time_string)
+    times = []
+    try:
+        for num,time_ in enumerate(timeList):
+            if time_ == '':
+                continue
+            #Try and idiot-proof some edge cases
+            if time_.startswith((':',';','.')):
+                temp_time = time_[1:]
+            else:
+                temp_time = time_
+            modifier = 0
+            if '|' in time_:
+                temp = time_.split('|')
+                name = temp[0]
+                if name.startswith('>') or name.lower() == 'breaker':
+                    #Breaker
+                    if name.startswith('>'): name = name[1:]
+                    modifier = 10
+                temp_time = temp[1]
+            else:
+                name = f'{num+1}'
+            time_components = [int(s) for s in re.split(r'[:;.]',temp_time)]
+            time_components.reverse()
+            seconds = 0
+            for n,t in enumerate(time_components):
+                seconds += t*60**n
+            times.append({'name': name, 'seconds': seconds, 'multiplier': multi, 'modifier': modifier})
+        return times
+    except Exception:
+        return []
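calc_times_from_modal folds the reversed h/m/s components with powers of 60; the same arithmetic for a single time string, as a compact sketch (to_seconds is an illustrative name):

import re

def to_seconds(time_string: str) -> int:
    # "1:30:05" -> 5*60**0 + 30*60**1 + 1*60**2 = 5405
    parts = [int(p) for p in re.split(r"[:;.]", time_string)]
    return sum(t * 60 ** n for n, t in enumerate(reversed(parts)))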
+class timesModal(disnake.ui.Modal):
+    def __init__(self,multi=4,inter=None):
+        self.inter = inter
+        self.id = None
+        if inter is not None:
+            self.id = inter.message.id
+        components = [
+            disnake.ui.TextInput(
+                label="name|hh:mm:ss or hh:mm:ss can add multiple",
+                placeholder="Times to convert",
+                custom_id="times",
+                required=True,
+            ),
+            disnake.ui.TextInput(
+                label="Time multiplier",
+                placeholder="4",
+                custom_id="multi_value",
+                style=disnake.TextInputStyle.short,
+                max_length=1,
+                value=str(multi),
+            ),
+        ]
+        super().__init__(
+            title="Calculate attack times",
+            custom_id="submit_calc",
+            components=components
+        )
+    async def callback(self, interaction: disnake.ModalInteraction):
+        if self.id:
+            await interaction.response.defer(ephemeral=True)
+        else:
+            await interaction.response.defer()
+        times = interaction.text_values['times']
+        multi = int(interaction.text_values['multi_value'])
+        #do tests on times:
+        times_list = calc_times_from_modal(times,multi)
+        if times_list:
+            #Store relevant info
+            time_data_list = []
+            if not self.id:
+                message:disnake.Message = await interaction.followup.send("If you see this for long something went wrong")
+            message_id = self.id or message.id
+            with db.get_session()() as session:
+                user:db.User = session.query(db.User).filter(db.User.discord_id == interaction.author.id).first()
+                for t in times_list:
+                    time_data_list.append(db.Time_Data(message_id=message_id, user_id = user.id, **t))
+                session.add_all(time_data_list)
+                session.commit()
+            if self.id is not None:
+                await update_message(inter=interaction,override_inter=self.inter)
+            else:
+                await update_message(inter=interaction,time_data_list=time_data_list)
+        else:
+            await interaction.followup.send("Something went wrong.")
+            return
+        return
+
+
+async def update_message(inter:disnake.ApplicationCommandInteraction,time_data_list=None,display_all_multipliers=False,override_inter=None):
+    if not time_data_list:
+        with db.get_session()() as session:
+            time_data_list = session.query(db.Time_Data).filter(db.Time_Data.message_id == inter.message.id).all()
+    if not time_data_list:
+        await inter.followup.send("No time data found")
+        return
+    time_data_list.sort(key=lambda y: y.seconds)
+    time_data_list.reverse()
+
+    #Components
+    components = [[],[]]
+    components[0].append(disnake.ui.Select(placeholder="Change multiplier",options=["x2","x3","x4","x5","x6"],custom_id="calc_time_multi"))
+    if len(time_data_list) == 1:
+        components[1].append(disnake.ui.Button(label="Show All Multipliers", custom_id="calc_time_show_all"))
+    components[1].append(disnake.ui.Button(label="Add time",custom_id="calc_time_add",style=disnake.ButtonStyle.green))
+    components[1].append(disnake.ui.Button(label="Create New",custom_id="calc_time_new",style=disnake.ButtonStyle.green))
+    components[1].append(disnake.ui.Button(label="Delete Message",custom_id="calc_time_delete",style=disnake.ButtonStyle.danger))
+    #Embed
+    embed = disnake.Embed()
+    embed.title = "Time Converter"
+    embed.set_author(name='Storm Brigade',icon_url='https://i.imgur.com/Opk3fCq.png')
+    embed.description = "Modifies any times to the desired card level\nUseful for prepping times from monk speeds"
+
+    before_string = ""
+    after_string = ""
+    data: db.Time_Data
+    if not display_all_multipliers:
+        for data in time_data_list:
+            emoji = '<:Captain:947543163608924181>'
+            if data.modifier > 0:
+                emoji = '<:Breaker:947543175025819709>'
+            before_string += f"{emoji}**{data.name}**:{timedelta(seconds=data.seconds)} -**x{data.multiplier}**->\n"
+            after_string += f"{timedelta(seconds=int(data.seconds/data.multiplier))}\n"
+    else:
+        for m in range(2,7):
+            emoji = '<:Captain:947543163608924181>'
f"{emoji}**{time_data_list[0].name}**:{timedelta(seconds=time_data_list[0].seconds)} -**x{m}**->\n" + after_string += f"{timedelta(seconds=int(time_data_list[0].seconds/m))}\n" + + embed.add_field(name="Original",value=before_string) + embed.add_field(name="Modified",value=after_string) + if override_inter != None: + await inter.followup.send("Added to the list, this message is required for now, sorry. Just dismiss it :)") + await override_inter.edit_original_message(content="",embed=embed,components=components) + else: + await inter.edit_original_message(content="",embed=embed,components=components) + + + +def setup(bot): + bot.add_cog(Calculate_Time(bot)) \ No newline at end of file diff --git a/cogs_old/check_house.py b/cogs_old/check_house.py new file mode 100644 index 0000000..7deda34 --- /dev/null +++ b/cogs_old/check_house.py @@ -0,0 +1,68 @@ +from disnake.ext import commands +import disnake +import db +import httpx +from datetime import datetime +from loguru import logger +import sb_utils +import sb_emojis + +class Check_House(commands.Cog): + def __init__(self,bot): + self.bot = bot + + @commands.slash_command(description="Look up Activity about a house") + async def check_house(self, inter:disnake.ApplicationCommandInteraction,house=commands.Param(description="Which house?",choices=[str(x) for x in range(1,21)]),ban_check=commands.Param(description="Also check for bans? This will take a while, slowed down to avoid FF banning the bot",choices=["Yes","No"],default="No"),world:str = ""): + logger.info(f"{inter.application_command.name} used by {inter.author} using {inter.filled_options}") + await inter.response.defer(with_message=True,ephemeral=True) + async with httpx.AsyncClient() as client: + result = await client.get(f"https://shk.azure-api.net/shkinfo/v1/HouseActivity?world=World%202&house={house}&Key=5E78CFC8-1FFA-4036-8427-D94ED6E1A45B&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d") + house_players_activity = {} + for player in result.json(): + banned = None + vm = None + if ban_check == "Yes": + banned_result = await sb_utils.check_banned_player(session,client,player["Username"]) + banned = banned_result["banned"] + house_players_activity[player["Username"]] = (max(datetime.strptime(player["MaxPersonTime"],"%Y-%m-%dT%H:%M:%S"),datetime.strptime(player["MaxTraderTime"],"%Y-%m-%dT%H:%M:%S")),(banned,vm)) + #Done with Httpx session + house_result = session.query(db.House).filter(db.House.id == house).one_or_none() + #Done with db session + sorted_players = sorted(house_players_activity.items(), key=lambda x: x[1][0]) # Sort by least active + embed = disnake.Embed(title=f"{house_result.emoji} House {house} Activity Report {house_result.emoji}") + embed.description = f"Generated at: {disnake.utils.format_dt(disnake.utils.utcnow(),'D')}\nOnly tracks map activity, so if they're online and not scouting/trading etc this won't reflect that" + embed.set_thumbnail(file=disnake.File(f"resources\house_sprites\{house}.png")) + player_entries = [(f"**{player}** - {data[0].strftime('%d/%b/%Y %H:%M:%S')}\n",data[1]) for player,data in sorted_players] + temp_value = "" + for player_tuple in player_entries: + if player_tuple[1][0] != None: #Banned has been set + if player_tuple[1][0]: + banned_emoji = sb_emojis.embed_fillers["banned"] + else: + banned_emoji = sb_emojis.embed_fillers["banned_before"] + else: + banned_emoji = sb_emojis.embed_fillers["no_data"] + + if player_tuple[1][1] != None: #VM has been set + if player_tuple[1][1]: + vm_emoji = sb_emojis.embed_fillers["vm_active"] + else: 
+ vm_emoji = sb_emojis.embed_fillers["vm_logged"] + else: + vm_emoji = sb_emojis.embed_fillers["no_data"] + temp = f"{vm_emoji}{banned_emoji} {player_tuple[0]}" + if "01/Jan/0001 00:00:00" in player_tuple[0]: + temp = temp.replace("01/Jan/0001 00:00:00","**INACTIVE/RESET**") + if len(temp_value)+len(temp) > 1024: + if len(embed)+len(temp_value) > 6000: + break + embed.add_field(name='\u200b',value=temp_value,inline=False) + temp_value = '' + temp_value += temp + embed.add_field(name='\u200b',value=temp_value,inline=False) + embed.colour = disnake.Color(int(house_result.color,0)) + await inter.send("Here you go, blame discord for this dumb message...",delete_after=1) + await inter.channel.send(embed=embed) + +def setup(bot): + bot.add_cog(Check_House(bot)) \ No newline at end of file diff --git a/cogs_old/parish_intel.py b/cogs_old/parish_intel.py new file mode 100644 index 0000000..5b98d5a --- /dev/null +++ b/cogs_old/parish_intel.py @@ -0,0 +1,24 @@ +from disnake.ext import commands +import disnake +import db +import httpx +from datetime import datetime +from loguru import logger +import sb_utils +import sb_emojis +import csv +from io import StringIO + +class Parish_Intel(commands.Cog): + def __init__(self,bot): + self.bot = bot + + @commands.slash_command() + async def moon(self,inter): + asd = disnake.Embed(title="KEK0") + asd.set_image(file=disnake.File("0.png")) + +def setup(bot): + logger.info(f"{__name__} Loaded") + bot.add_cog(Parish_Intel(bot)) + \ No newline at end of file diff --git a/cogs_old/player_intel.py b/cogs_old/player_intel.py new file mode 100644 index 0000000..e68a62d --- /dev/null +++ b/cogs_old/player_intel.py @@ -0,0 +1,66 @@ +from disnake.ext import commands +import disnake +import db +import httpx +from datetime import datetime +from loguru import logger +import sb_utils +import sb_emojis +import csv +from io import StringIO +from uuid import uuid4 +from embed_factory import castle_design_confirmation +from os import makedirs + + +class Player_Intel(commands.Cog): + def __init__(self, bot): + self.bot = bot + + @commands.slash_command() + async def upload_castle(self, inter:disnake.ApplicationCommandInteraction,player: str, design: disnake.Attachment): + if not design.content_type.startswith('image'): + return await inter.send("You can only use this with pictures",ephemeral=True) + async with httpx.AsyncClient() as client: + response = await client.get( + f"http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={player}" + ) + response = response.json() + if len(response) != 1: + return await inter.send(f"Sorry, {player} could not be found or theres too many results, check spelling") + + await inter.response.defer(ephemeral=True) + + with db.get_session()() as session: + player:db.Player = db.get_or_create(session,db.Player,player_name = player)[0] + server:db.Server = db.get_or_create(session,db.Server,server_name = inter.guild.name,discord_guild_id = inter.guild.id)[0] + file_name, ext = design.filename.split('.') + path = f"castle_designs//{player.id}" + makedirs(path) + await design.save(f"{path}//{uuid4()}.{ext}") + castle_design = db.Player_Castle_Designs(path=f"{path}//{uuid4()}.{ext}", player_id=player.id, server_id=server.id) + session.add(castle_design) + session.commit() + target_advert = session.query(db.Advert).filter(db.Advert.server_id == server.id).filter(db.Advert.advert_type_id == 6).first() + if not target_advert: + await inter.send("There's not a castle_design channel for this server, so posting confirmation in this channel") + 
+                await inter.channel.send(embed=castle_design_confirmation.generate_embed(design,player.player_name,inter.author.display_name),components=castle_design_confirmation.generate_buttons(inter.author.id))
+
+    @upload_castle.autocomplete("player")
+    async def autocomp_input(
+        self, inter: disnake.ApplicationCommandInteraction, user_input: str
+    ):
+        if len(user_input) == 0:
+            return []
+        async with httpx.AsyncClient() as client:
+            response = await client.get(
+                f"http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={user_input}"
+            )
+        return response.json()
+
+
+def setup(bot):
+    logger.info(f"{__name__} Loaded")
+    bot.add_cog(Player_Intel(bot))
diff --git a/db.py b/db.py
index 961d04a..e96a199 100644
--- a/db.py
+++ b/db.py
@@ -1,145 +1,197 @@
 #! .\sbsheriff\scripts\python.exe
-import sqlalchemy as sqla
+import disnake
+from sqlalchemy import Integer, create_engine, String, Column, ForeignKey,Table,Boolean,BigInteger,SmallInteger,DateTime,func
 from sqlalchemy.orm import declarative_base, relationship, scoped_session, sessionmaker
+from dotenv import load_dotenv
+from os import getenv
 
-engine = sqla.create_engine('postgresql://strix:unnipus1213@192.168.1.2:5432/sbdb')
+load_dotenv(".env", override=True)
+engine = create_engine(getenv("DB_CON"),pool_pre_ping=True)
 Base = declarative_base()
+Session = sessionmaker(engine)
 
 class World(Base):
     __tablename__ = 'world'
-    id = sqla.Column(sqla.Integer, primary_key=True)
-    short_name = sqla.Column(sqla.String(50))
-    world_name = sqla.Column(sqla.String(50))
-    shk_id = sqla.Column(sqla.Integer)
-    age = sqla.Column(sqla.Integer)
-    ended = sqla.Column(sqla.Boolean)
-    hoh_scraped = sqla.Column(sqla.Boolean)
+    id = Column(Integer, primary_key=True)
+    short_name = Column(String(50))
+    world_name = Column(String(50))
+    shk_id = Column(Integer)
+    age = Column(Integer)
+    ended = Column(Boolean)
+    hoh_scraped = Column(Boolean)
     servers = relationship('Server',back_populates='worlds') #Checked
     hoh_entries = relationship('Hall_Of_Heroes',back_populates='world') #Checked
     house_histories = relationship('House_History',back_populates='world') #Checked
+    parishes = relationship("Parish_Data",back_populates='world') #Checked
 
 class Player(Base):
     __tablename__ = 'player'
-    id = sqla.Column(sqla.Integer, primary_key=True)
-    player_name = sqla.Column(sqla.String(100))
-    user_id = sqla.Column(sqla.Integer,sqla.ForeignKey('user.id'))
+    id = Column(Integer, primary_key=True)
+    player_name = Column(String(100))
+    user_id = Column(Integer,ForeignKey('user.id'))
     user = relationship('User',back_populates='player') #Checked
     vm_entries = relationship('Vm_Entry',back_populates='player') #Checked
     hoh_entries = relationship("Hall_Of_Heroes",back_populates="player") #Checked
     house_history = relationship("House_History",back_populates="player") #Checked
     castle_designs = relationship("Player_Castle_Designs",back_populates="player") #Checked
+    checks = relationship('Check_Player_Data',back_populates='player')
+    world_data = relationship("WorldPlayerData",back_populates="player")#C
 
-server_user = sqla.Table(
+class PlayerBan(Base):
+    __tablename__ = "player_ban"
+    id = Column(Integer,primary_key=True)
+    player_id = Column(Integer,ForeignKey("player.id"))
+    date_added = Column(DateTime,server_default=func.now())
+    last_check = Column(DateTime,server_default=func.now())
+    unbanned = Column(Boolean,default=False)
+    permanent = Column(Boolean,default=False)
+
+class BanTrackedHouse(Base):
__tablename__ = "ban_tracked_house" + id = Column(Integer,primary_key=True) + house_id = Column(Integer,ForeignKey("house.id")) + tracked_by_server_id = Column(Integer,ForeignKey("server.id")) + +class BanTrackedPlayer(Base): + __tablename__ = "ban_tracked_player" + id = Column(Integer,primary_key=True) + player_id = Column(Integer,ForeignKey("player.id")) + tracked_by_server_id = Column(Integer,ForeignKey("server.id")) + +server_user = Table( "association", Base.metadata, - sqla.Column("server_id",sqla.ForeignKey("server.id")), - sqla.Column("user_id",sqla.ForeignKey("user.id")), + Column("server_id",ForeignKey("server.id")), + Column("user_id",ForeignKey("user.id")), ) +class WorldPlayerData(Base): + __tablename__ = 'world_player_data' + id = Column(Integer,primary_key=True) + player_id = Column(Integer,ForeignKey("player.id")) + world_id = Column(Integer,ForeignKey("world.id")) + cp_rank = Column(Integer) + player = relationship("Player",back_populates="world_data")#C + class Server(Base): __tablename__ = 'server' - id = sqla.Column(sqla.Integer, primary_key=True) - server_name = sqla.Column(sqla.Integer) - discord_guild_id = sqla.Column(sqla.BigInteger,unique=True) - world_id = sqla.Column(sqla.Integer,sqla.ForeignKey('world.id')) + id = Column(Integer, primary_key=True) + server_name = Column(String) + discord_guild_id = Column(BigInteger,unique=True) + world_id = Column(Integer,ForeignKey('world.id')) + api_key = Column(String) worlds = relationship("World",back_populates="servers") #Checked users = relationship("User",secondary=server_user,back_populates='servers') #Checked + roles = relationship("Server_Roles", back_populates="server") + adverts = relationship("Advert",back_populates="server") + class User(Base): __tablename__ = 'user' - id = sqla.Column(sqla.Integer, primary_key=True) - discord_id = sqla.Column(sqla.BigInteger,unique=True) - display_name = sqla.Column(sqla.String(50)) + id = Column(Integer, primary_key=True) + discord_id = Column(BigInteger,unique=True) + display_name = Column(String(50)) servers = relationship("Server",secondary=server_user,back_populates='users') #Checked - #Parent in user prefferences relationship - user_prefference = relationship("User_Prefference",back_populates="user",uselist=False) #Checked + #Parent in user preferences relationship + user_preference = relationship("User_Preference",back_populates="user",uselist=False) #Checked #Parent in user player relationship player = relationship('Player',back_populates='user',uselist=False) #Checked -class User_Prefference(Base): - __tablename__ = 'user_prefference' - id = sqla.Column(sqla.Integer,primary_key=True) - preffered_pikes = sqla.Column(sqla.Integer) - preffered_archers = sqla.Column(sqla.Integer) - user_id = sqla.Column(sqla.Integer,sqla.ForeignKey('user.id')) - user = relationship('User',back_populates='user_prefference') #Checked + parish_actions = relationship("Parish_Action",back_populates='user') + time_data = relationship("Time_Data",back_populates='user') + +class User_Preference(Base): + __tablename__ = 'user_preference' + id = Column(Integer,primary_key=True) + preferred_pikes = Column(Integer) + preferred_archers = Column(Integer) + user_id = Column(Integer,ForeignKey('user.id')) + user = relationship('User',back_populates='user_preference') #Checked class Advert_Type(Base): __tablename__ = 'advert_type' - id = sqla.Column(sqla.Integer, primary_key=True) - advert_name = sqla.Column(sqla.String(50)) + id = Column(Integer, primary_key=True) + advert_name = Column(String(50)) adverts 
= relationship("Advert",back_populates="advert_type") #Checked class Advert(Base): __tablename__ = 'advert' - advert_id = sqla.Column(sqla.Integer, primary_key=True) - advert_type_id = sqla.Column(sqla.SmallInteger, sqla.ForeignKey("advert_type.id")) - channel_id = sqla.Column(sqla.BigInteger) - message_id = sqla.Column(sqla.BigInteger) + advert_id = Column(Integer, primary_key=True) + advert_type_id = Column(SmallInteger, ForeignKey("advert_type.id")) + server_id = Column(Integer,ForeignKey('server.id')) + channel_id = Column(BigInteger) + message_id = Column(BigInteger) advert_type = relationship("Advert_Type",back_populates="adverts") #Checked + server = relationship("Server",back_populates="adverts") class Vm_Entry(Base): __tablename__ = 'vm_entry' - id = sqla.Column(sqla.Integer, primary_key=True) - added_by_user_id = sqla.Column(sqla.Integer, sqla.ForeignKey("user.id")) - player_id = sqla.Column(sqla.Integer, sqla.ForeignKey("player.id")) - server_id = sqla.Column(sqla.Integer, sqla.ForeignKey("server.id")) - time_added = sqla.Column(sqla.Time) - time_ended = sqla.Column(sqla.Time,nullable=True) + id = Column(Integer, primary_key=True) + added_by_user_id = Column(Integer, ForeignKey("user.id")) + player_id = Column(Integer, ForeignKey("player.id")) + server_id = Column(Integer, ForeignKey("server.id")) + time_added = Column(DateTime,server_default=func.now()) + time_ended = Column(DateTime,nullable=True) player = relationship("Player",back_populates="vm_entries") #Checked class House(Base): __tablename__ = 'house' - id = sqla.Column(sqla.Integer, primary_key=True) - color = sqla.Column(sqla.String) - emoji = sqla.Column(sqla.String) + id = Column(Integer, primary_key=True) + name = Column(String) + color = Column(String) + emoji = Column(String) relationships = relationship("House_Relationship",back_populates="house") class Relationship_State(Base): __tablename__ = 'relationship_state' - id = sqla.Column(sqla.Integer, primary_key=True) - relationship_name = sqla.Column(sqla.String(50)) - alignment = sqla.Column(sqla.Integer) + id = Column(Integer, primary_key=True) + relationship_name = Column(String(50)) + alignment = Column(Integer) + emoji = Column(String) house_relations = relationship('House_Relationship',back_populates='relationship_state') #Checked player_relations = relationship('Player_Relationship',back_populates='relationship_state')#Checked class House_Relationship(Base): __tablename__ = 'house_relationship' - id = sqla.Column(sqla.Integer, primary_key=True) + id = Column(Integer, primary_key=True) - server_id = sqla.Column(sqla.Integer, sqla.ForeignKey("server.id")) - house_id = sqla.Column(sqla.Integer, sqla.ForeignKey("house.id")) + server_id = Column(Integer, ForeignKey("server.id")) + house_id = Column(Integer, ForeignKey("house.id")) house = relationship("House",back_populates="relationships") #Checked - relationship_state_id = sqla.Column(sqla.Integer, sqla.ForeignKey("relationship_state.id")) + relationship_state_id = Column(Integer, ForeignKey("relationship_state.id")) relationship_state = relationship("Relationship_State",back_populates="house_relations") #Checked class Player_Relationship(Base): __tablename__ = 'player_relationship' - id = sqla.Column(sqla.Integer, primary_key=True) - note = sqla.Column(sqla.String(100),nullable=True) + id = Column(Integer, primary_key=True) + note = Column(String(100),nullable=True) - server_id = sqla.Column(sqla.Integer, sqla.ForeignKey("server.id")) - player_id = sqla.Column(sqla.Integer, sqla.ForeignKey("player.id")) - 
-    relationship_state_id = sqla.Column(sqla.Integer, sqla.ForeignKey("relationship_state.id"))
+    server_id = Column(Integer, ForeignKey("server.id"))
+    player_id = Column(Integer, ForeignKey("player.id"))
+    relationship_state_id = Column(Integer, ForeignKey("relationship_state.id"))
     relationship_state = relationship('Relationship_State',back_populates='player_relations')#Checked
 
 class Hoh_Rank(Base):
     __tablename__ = 'hoh_rank'
-    id = sqla.Column(sqla.Integer, primary_key=True)
-    rank_name = sqla.Column(sqla.String(50))
+    id = Column(Integer, primary_key=True)
+    rank_name = Column(String(50))
     hoh_entries = relationship("Hall_Of_Heroes",back_populates="rank") #Checked
 
 class Hall_Of_Heroes(Base):
     __tablename__ = 'hall_of_heroes'
-    id = sqla.Column(sqla.Integer,primary_key=True)
-    player_id = sqla.Column(sqla.Integer,sqla.ForeignKey('player.id'))
-    hoh_rank_id = sqla.Column(sqla.Integer,sqla.ForeignKey('hoh_rank.id'))
-    world_id = sqla.Column(sqla.Integer,sqla.ForeignKey('world.id'))
+    id = Column(Integer,primary_key=True)
+    player_id = Column(Integer,ForeignKey('player.id'))
+    hoh_rank_id = Column(Integer,ForeignKey('hoh_rank.id'))
+    world_id = Column(Integer,ForeignKey('world.id'))
 
     #Parent in player hoh relationship
     player = relationship('Player',back_populates='hoh_entries') #Checked
@@ -148,39 +200,191 @@ class Hall_Of_Heroes(Base):
 
 class Player_Castle_Designs(Base):
     __tablename__ = "player_castle_designs"
-    id = sqla.Column(sqla.Integer,primary_key=True)
-    path = sqla.Column(sqla.String)
-    player_id = sqla.Column(sqla.Integer,sqla.ForeignKey("player.id"))
-    server_id = sqla.Column(sqla.Integer,sqla.ForeignKey("server.id"))
+    id = Column(Integer,primary_key=True)
+    path = Column(String)
+    player_id = Column(Integer,ForeignKey("player.id"))
+    server_id = Column(Integer,ForeignKey("server.id"))
+    validated = Column(Boolean,default=False)
     player = relationship("Player", back_populates="castle_designs") #Checked
     server = relationship("Server") #Checked
 
 class House_History(Base):
     __tablename__ = 'house_history'
-    id = sqla.Column(sqla.Integer,primary_key=True)
-    date = sqla.Column(sqla.Date)
+    id = Column(Integer,primary_key=True)
+    date = Column(DateTime)
 
-    player_id = sqla.Column(sqla.Integer,sqla.ForeignKey("player.id"))
-    world_id = sqla.Column(sqla.Integer,sqla.ForeignKey("world.id"))
-    house_id = sqla.Column(sqla.Integer,sqla.ForeignKey("house.id"))
+    player_id = Column(Integer,ForeignKey("player.id"))
+    world_id = Column(Integer,ForeignKey("world.id"))
+    house_id = Column(Integer,ForeignKey("house.id"))
     player = relationship("Player",back_populates='house_history') #Checked
     world = relationship("World",back_populates='house_histories') #Checked
     house = relationship("House") #Checked
 
 class Liegelord_Requests(Base):
     __tablename__ = 'liegelord_requests'
-    id = sqla.Column(sqla.Integer,primary_key=True)
-    village_id = sqla.Column(sqla.SmallInteger)
-    confirm_message_id = sqla.Column(sqla.BigInteger)
+    id = Column(Integer,primary_key=True)
+    village_id = Column(SmallInteger)
+    confirm_message_id = Column(BigInteger)
 
-    server_id = sqla.Column(sqla.Integer,sqla.ForeignKey("server.id"))
-    request_user_id = sqla.Column(sqla.Integer,sqla.ForeignKey('user.id'))
-    fulfilled_user_id = sqla.Column(sqla.Integer,sqla.ForeignKey('user.id'))
+    server_id = Column(Integer,ForeignKey("server.id"))
+    request_user_id = Column(Integer,ForeignKey('user.id'))
+    fulfilled_user_id = Column(Integer,ForeignKey('user.id'))
     server = relationship("Server") #Checked
     request_user = relationship("User",foreign_keys=[request_user_id])
relationship("User",foreign_keys=[request_user_id]) fulfilled_user= relationship("User",foreign_keys=[fulfilled_user_id]) -def create_session(): - session = scoped_session(sessionmaker(bind=engine)) - return session \ No newline at end of file +class Parish_Data(Base): + __tablename__ = 'parish_data' + id = Column(Integer,primary_key=True) + parish_id = Column(Integer) + parish_name = Column(String) + world_id = Column(Integer,ForeignKey('world.id')) + + world = relationship("World",back_populates='parishes') #Checked + actions = relationship("Parish_Action",back_populates='parish') #Checked + +class Parish_Action(Base): + __tablename__ = 'parish_action' + id = Column(Integer,primary_key=True) + parish_id = Column(Integer,ForeignKey('parish_data.id')) + performed_by_user_id = Column(Integer,ForeignKey('user.id')) + performed_at = Column(DateTime,server_default=func.now()) + building_id = Column(Integer,ForeignKey('parish_building.id')) + + + parish = relationship("Parish_Data",back_populates = 'actions') #Checked + building = relationship("Parish_Building",back_populates='actions') #Checked + user = relationship("User",back_populates='parish_actions') #Checked + +class Parish_Building(Base): + __tablename__ = 'parish_building' + id = Column(Integer,primary_key=True) + name = Column(String) + actions = relationship("Parish_Action",back_populates='building') #checked + +class User_Interaction(Base): + __tablename__ = "user_interaction" + id = Column(Integer,primary_key=True) + date = Column(DateTime,server_default=func.now()) + action = Column(String) + user_id = Column(Integer,ForeignKey("user.id")) + server_id = Column(Integer,ForeignKey("server.id")) + +class Time_Data(Base): + __tablename__ = 'time_data' + id = Column(Integer,primary_key = True) + message_id = Column(BigInteger) + name = Column(String) + seconds = Column(Integer) + multiplier = Column(Integer) + modifier = Column(Integer) + user_id = Column(Integer,ForeignKey('user.id')) + + user = relationship("User",back_populates='time_data') + +class Check_Player_Data(Base): + __tablename__ = 'check_player_data' + id = Column(Integer,primary_key = True) + player_id = Column(Integer,ForeignKey('player.id')) + + player = relationship('Player',back_populates='checks') + +class AI(Base): + __tablename__ = 'ai' + id = Column(Integer, primary_key=True) + name = Column(String) + +class AI_Castle(Base): + __tablename__ = 'ai_castle' + id = Column(Integer, primary_key=True) + ai_id = Column(Integer, ForeignKey('ai.id')) + ai_level = Column(Integer) + archers = Column(Integer) + pikemen = Column(Integer) + catapults = Column(Integer) + swordmen = Column(Integer) + captains = Column(Integer) + parish_compatible = Column(Boolean) + img_path = Column(String) + +class Role_Type(Base): + __tablename__ = 'role_type' + id = Column(Integer, primary_key=True) + name = Column(String) + +class Server_Roles(Base): + __tablename__ = 'server_roles' + id = Column(Integer, primary_key=True) + server_id = Column(Integer, ForeignKey('server.id')) + role_type_id = Column(Integer, ForeignKey('role_type.id')) + discord_role_id = Column(BigInteger) + + server = relationship("Server",back_populates="roles") + + + + +def get_session(): + return Session + +def log_interaction(inter:disnake.ApplicationCommandInteraction,action:str): + with Session() as session: + #Get or Create User + user = session.query(User).filter(User.discord_id==inter.author.id).first() + server = session.query(Server).filter(Server.discord_guild_id==inter.guild_id).first() + if user == None: + 
+            #Create user object and associate with server
+            print(f"adding new user to database -> {inter.author.display_name}")
+            user = User(display_name=inter.author.display_name,discord_id=inter.author.id)
+            session.add(user)
+            session.commit()
+        if server not in user.servers:
+            print(f"associating new user to server -> {inter.author.display_name} -> {inter.guild.name}")
+            user.servers.append(server)
+        interaction = User_Interaction(user_id=user.id,server_id=server.id,action=action)
+        session.add(interaction)
+        session.commit()
+
+def add_players_from_search(json:dict):
+    with Session() as session:
+        for entry in json:
+            player = session.query(Player).filter(Player.player_name==entry).first()
+            if player is None:
+                player = Player(player_name=entry)
+                session.add(player)
+                session.commit()
+
+def get_or_create(session, model, defaults=None, **kwargs):
+    instance = session.query(model).filter_by(**kwargs).one_or_none()
+    if instance:
+        return instance, False
+    else:
+        kwargs |= defaults or {}
+        instance = model(**kwargs)
+        try:
+            session.add(instance)
+            session.commit()
+        except Exception:
+            #a concurrent insert may have won the race; fetch the existing row instead
+            session.rollback()
+            instance = session.query(model).filter_by(**kwargs).one()
+            return instance, False
+        else:
+            return instance, True
+
+def get_adverts():
+    with Session() as session:
+        return session.query(Advert_Type).all()
+
+def get_advert_type_by_name(name: str):
+    with Session() as session:
+        advert_type = session.query(Advert_Type).filter_by(advert_name=name).one_or_none()
+        if advert_type:
+            return advert_type.id
+        else:
+            return None
+def get_ai_castles(ai_id: int):
+    with Session() as session:
+        return session.query(AI_Castle).filter(AI_Castle.ai_id == ai_id).all()
+
+#Base.metadata.create_all(bind=engine)
\ No newline at end of file
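A minimal usage sketch of the helpers above (not part of the patch): it assumes db.py exposes Session, Player, Vm_Entry and get_or_create as defined in this file; the player name and ids below are placeholders.

from db import Session, Player, Vm_Entry, get_or_create

with Session() as session:
    # get_or_create returns (instance, created_flag) and commits when it has to insert
    player, created = get_or_create(session, Player, player_name="SomePlayer")
    # time_added is filled in by the database through server_default=func.now()
    session.add(Vm_Entry(player_id=player.id, server_id=1, added_by_user_id=1))
    session.commit()

diff --git a/embed_factory/__pycache__/castle_design_confirmation.cpython-310.pyc b/embed_factory/__pycache__/castle_design_confirmation.cpython-310.pyc new file mode 100644 index 0000000..dff3e8c Binary files /dev/null and b/embed_factory/__pycache__/castle_design_confirmation.cpython-310.pyc differ diff --git a/embed_factory/__pycache__/check_player.cpython-310.pyc b/embed_factory/__pycache__/check_player.cpython-310.pyc new file mode 100644 index 0000000..e5340e0 Binary files /dev/null and b/embed_factory/__pycache__/check_player.cpython-310.pyc differ diff --git a/embed_factory/__pycache__/embeds.cpython-310.pyc b/embed_factory/__pycache__/embeds.cpython-310.pyc new file mode 100644 index 0000000..fa54781 Binary files /dev/null and b/embed_factory/__pycache__/embeds.cpython-310.pyc differ diff --git a/embed_factory/__pycache__/vm_advert.cpython-310.pyc b/embed_factory/__pycache__/vm_advert.cpython-310.pyc new file mode 100644 index 0000000..bc550fa Binary files /dev/null and b/embed_factory/__pycache__/vm_advert.cpython-310.pyc differ
diff --git a/embed_factory/castle_design_confirmation.py b/embed_factory/castle_design_confirmation.py
new file mode 100644
index 0000000..02ec9f0
--- /dev/null
+++ b/embed_factory/castle_design_confirmation.py
@@ -0,0 +1,13 @@
+import disnake
+from embed_factory import embeds
+
+def generate_embed(attachment:disnake.Attachment,player_name,author_name):
+    embed = embeds.base_embed()
+    embed.set_image(url=attachment.url) #Attachment.to_file() is a coroutine; set_image takes a url (or a file= kwarg)
+    embed.description = f"{author_name} submits this design for {player_name}"
+    return embed
+
+def generate_buttons(author_id):
+    confirm = disnake.ui.Button(label="Confirm Submission",emoji="✅",style=disnake.ButtonStyle.green,custom_id=f"cdc-confirm-{author_id}")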
disnake.ui.Button(label="Delete",emoji="❌",style=disnake.ButtonStyle.red,custom_id="cdc-delete") + return [confirm,delete] \ No newline at end of file diff --git a/embed_factory/check_player.py b/embed_factory/check_player.py new file mode 100644 index 0000000..51dd52a --- /dev/null +++ b/embed_factory/check_player.py @@ -0,0 +1,6 @@ +import disnake +import sqlalchemy.orm +class Overview(disnake.Embed): + def __init__(self, session:sqlalchemy.orm.session, target_player_id:int): + + super().__init__(title=f"{2+2}") \ No newline at end of file diff --git a/embed_factory/embeds.py b/embed_factory/embeds.py new file mode 100644 index 0000000..179d22c --- /dev/null +++ b/embed_factory/embeds.py @@ -0,0 +1,19 @@ +import disnake +import httpx + +class base_embed(disnake.Embed): + def __init__(self): + super().__init__() + self.set_footer(text="Provided by Storm Brigade",icon_url='https://i.imgur.com/Opk3fCq.png') + #self.set_author(name='Storm Brigade',icon_url='https://i.imgur.com/Opk3fCq.png') + self.set_thumbnail(file=disnake.File("resources\StormBrigade_White.png")) + + async def add_player_shield(self,in_game_name:str): + async with httpx.AsyncClient() as client: + response = await client.get(f"https://login.strongholdkingdoms.com/ajaxphp/get_shield_url.php?username={in_game_name}&transparent=1") + response_json = response.json() + if response_json.get("url", None) is not None: + self.set_thumbnail(url=response_json.get("url")) + return self + + \ No newline at end of file diff --git a/embed_factory/vm_advert.py b/embed_factory/vm_advert.py new file mode 100644 index 0000000..5853c54 --- /dev/null +++ b/embed_factory/vm_advert.py @@ -0,0 +1,87 @@ +import disnake +from embed_factory import embeds +import sb_emojis +from datetime import datetime, timedelta +from main import StronkBot + +def generate_embed(bot:StronkBot, guild_id:int, vm_entries:dict,house_data:dict,relations:dict): + embed = embeds.base_embed() + embed.title = f"VM tracker" + embed.description = f"Players currently on VM\n Use to add more" + + vm_names = [entry["in_game_name"] for entry in vm_entries] + + if len(vm_entries) == 0: return embed + entry_list = [] + for entry in vm_entries: + if entry.get("finished"): continue + vm_num = '<:SecondVm:1020804212399018157>' if vm_names.count(entry["in_game_name"]) > 1 else '<:no_data:1020809240648101978>' + house_emoji = '<:no_data:1020809240648101978>' if house_data.get(entry["in_game_name"]) is None else sb_emojis.houseEmojis.get(house_data.get(entry["in_game_name"]).get("house")) + #Check relationships + relation_state = None + if house_data.get(entry["in_game_name"]) is not None: + relation_state = relations.get(f"House {house_data.get(entry['in_game_name']).get('house')}") + + if relation_state is None: relation_state = relations.get(entry["in_game_name"]) + relationship_emoji = '<:no_data:1020809240648101978>' if relation_state is None else sb_emojis.relationshipEmojis.get(relation_state) or '<:no_data:1020809240648101978>' + entry_list.append( f"{relationship_emoji}{house_emoji}{vm_num}{entry['in_game_name']}:{disnake.utils.format_dt(entry.get('added')+ timedelta(days=15), style='R')}\n") + + temp_value = '' + for vm_entry in entry_list: + if len(temp_value)+len(vm_entry) > 1024: + if len(embed)+len(temp_value) > 6000: + print("embed is going to be too large lol") + break + embed.add_field(name='\u200b',value=temp_value,inline=False) + temp_value = '' + temp_value += vm_entry + embed.add_field(name='\u200b',value=temp_value,inline=False) + return embed + +def 
+def generate_components():
+    return [disnake.ui.Button(label="Remove VM", emoji="➖", custom_id="vm_tracker.spawn_remove")]
+
+#NOTE: legacy sqlite-era implementation below; it still references the old
+#helpers and botOptions modules (and json), which are not imported in this file.
+def vm_advert(gID=None):
+    embed = disnake.Embed(title="VM tracker",description = "Players currently on VM")
+    embed.set_author(name='Storm Brigade',icon_url='https://i.imgur.com/Opk3fCq.png')
+    if gID is None:
+        return embed
+    #Cleanup expired vm's
+    expired_vms = helpers.sql_get('SELECT name,added FROM vm_entries WHERE gID = {gID} AND added < unixepoch("now","-15 days") AND finished IS NULL'.format(gID=gID))
+    vm_length = 1296000
+    if len(expired_vms) > 0:
+        for vm in expired_vms:
+            helpers.sql_set("UPDATE vm_entries SET finished = ? WHERE gID = ? AND name = ? AND added = ?",(vm[1]+vm_length,gID,vm[0],vm[1]))
+
+    #print(expired_vms)
+    from_date = int(datetime.fromisoformat("{YYYY}-12-15".format(YYYY=datetime.today().year-1)).timestamp())
+    to_date = int(datetime.fromisoformat("{YYYY}-01-15".format(YYYY=datetime.today().year+1)).timestamp())
+    #print("SELECT vm_entries.name,vm_entries.added,house.house FROM vm_entries INNER JOIN house ON LOWER(vm_entries.name) = LOWER(house.username) WHERE vm_entries.added BETWEEN {start} AND {end} AND vm_entries.finished IS NULL".format(start=from_date,end=to_date))
+    query_result = helpers.sql_get("SELECT vm_entries.name,vm_entries.added,house.house FROM vm_entries LEFT JOIN house ON LOWER(vm_entries.name) = LOWER(house.username) AND house.date > unixepoch('now','-6 hours') WHERE vm_entries.added BETWEEN {start} AND {end} AND vm_entries.finished IS NULL ORDER BY added ASC".format(start=from_date,end=to_date),True)
+    house_json = json.loads(helpers.sql_get("SELECT relationships FROM guild WHERE gID = {gID}".format(gID = gID))[0][0])
+    if len(query_result) > 0:
+        vm_entries = []
+        for entry in query_result:
+            vm_num = '<:no_data:1020809240648101978>'
+            vm_info = helpers.sql_get(f"SELECT added FROM vm_entries WHERE LOWER(name) = LOWER('{entry['name']}') AND added BETWEEN {from_date} AND {to_date}")
+            if len(vm_info) > 1:
+                vm_num = '<:SecondVm:1020804212399018157>'
+            house_emoji = '<:no_data:1020809240648101978>'
+            relationship_emoji = '<:no_data:1020809240648101978>'
+            if str(entry['house']) in house_json.keys():
+                relationship_emoji = botOptions.relationshipEmojis[house_json[str(entry['house'])]]
+            if entry['house'] is not None:
+                house_emoji = botOptions.houseEmojis[entry['house']]
+            vm_entries.append( "{relationship}{house}{num}{name}:<t:{time_remaining}:R>\n".format(relationship= relationship_emoji,num=vm_num,house=house_emoji,name=entry['name'],time_remaining=entry['added']+vm_length))
+        temp_value = ''
+        for vm_entry in vm_entries:
+            if len(temp_value)+len(vm_entry) > 1024:
+                if len(embed)+len(temp_value) > 6000:
+                    print("embed is going to be too large lol")
+                    break
+                embed.add_field(name='\u200b',value=temp_value,inline=False)
+                temp_value = ''
+            temp_value += vm_entry
+        embed.add_field(name='\u200b',value=temp_value,inline=False)
+    #print(len(embed))
+    return embed
\ No newline at end of file
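Both packing loops above respect Discord's embed limits: at most 1024 characters per field value and roughly 6000 characters per whole embed. The same logic as a standalone sketch (not part of the patch; the helper name is made up, and the final guard avoids adding an empty field when nothing is left over):

def pack_into_fields(embed, entries):
    value = ''
    for entry in entries:
        if len(value) + len(entry) > 1024:
            if len(embed) + len(value) > 6000:
                break  # the embed is full; remaining entries are dropped
            embed.add_field(name='\u200b', value=value, inline=False)
            value = ''
        value += entry
    if value:  # flush whatever is left
        embed.add_field(name='\u200b', value=value, inline=False)
    return embed

diff --git a/house_history_migrate.py b/house_history_migrate.py
new file mode 100644
index 0000000..e83375b
--- /dev/null
+++ b/house_history_migrate.py
@@ -0,0 +1,87 @@
+from pymongo import MongoClient
+import sqlite3
+import json
+from datetime import datetime
+import requests
+
+# Connect to SQLite
+sqlite_conn = sqlite3.connect('sbsheriff.sqlite')
+sqlite_conn.row_factory = sqlite3.Row
+sqlite_cursor = sqlite_conn.cursor()
+
+# Connect to MongoDB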
+mongo_client = MongoClient('mongodb://sheriff:unnipus1213@192.168.1.109:27017/?retryWrites=true&serverSelectionTimeoutMS=5000&connectTimeoutMS=10000&authSource=stormbrigade&authMechanism=SCRAM-SHA-256')
+mongo_db = mongo_client['stormbrigade']
+mongo_collection = mongo_db['intel_screenshots']
+
+# Fetch data from SQLite table
+sqlite_cursor.execute("SELECT gID, name, castle_designs, attack_designs FROM players")
+rows = sqlite_cursor.fetchall()
+
+
+# Iterate over the rows and migrate data to MongoDB
+for row in rows:
+
+    username = row['name']
+    guild_id = row['gID']
+    castle_design_json = row['castle_designs']
+    attack_design_json = row['attack_designs']
+
+    # Convert the castle/attack design JSON to Python dictionaries
+    castle_dict = json.loads(castle_design_json)
+    attack_dict = json.loads(attack_design_json)
+    #print(castle_dict)
+    # Iterate over the design entries and upsert them to MongoDB
+
+    player_doc = mongo_db.players.find_one(filter={"in_game_name": {"$regex": f"^{username}$", "$options": "i"}})
+    if player_doc is None:
+        player_doc = {}
+        r = requests.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={username}').json()
+        if len(r) > 0:
+            player_doc["in_game_name"] = r[0]
+        else:
+            print(username, len(castle_dict))
+            continue
+    #print(player_doc)
+
+    for filename in castle_dict.keys():
+        filter_query = {
+            "in_game_name": player_doc["in_game_name"],
+            "added_by" : {
+                "added_by_discord_id": 0,
+                "added_by_discord_name": "Old Submission",
+                "added_by_discord_server_id": 947398173805133834,
+            },
+            "type": "castle",
+            "filename": filename
+        }
+        update_query = {
+            "$set": {
+                "filename": filename
+            }
+        }
+        mongo_collection.update_one(filter_query, update_query, upsert=True)
+    for filename in attack_dict.keys():
+        filter_query = {
+            "in_game_name": player_doc["in_game_name"],
+            "added_by" : {
+                "added_by_discord_id": 0,
+                "added_by_discord_name": "Old Submission",
+                "added_by_discord_server_id": 947398173805133834,
+            },
+            "type": "attack",
+            "filename": filename
+        }
+        update_query = {
+            "$set": {
+                "filename": filename
+            }
+        }
+        mongo_collection.update_one(filter_query, update_query, upsert=True)
+    # Perform the upsert operation
+
+
+# Close connections
+sqlite_cursor.close()
+sqlite_conn.close()
+mongo_client.close()
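The core of the migration above is update_one(..., upsert=True): it updates a matching document, and when none exists it inserts one seeded from the filter, so re-running the script is idempotent per (player, added_by, type, filename) key. A minimal sketch of the same pattern (not part of the patch; the URI and names are placeholders):

from pymongo import MongoClient

client = MongoClient('mongodb://localhost:27017/')  # placeholder URI
screenshots = client['stormbrigade']['intel_screenshots']
key = {'in_game_name': 'SomePlayer', 'type': 'castle', 'filename': 'design.png'}
# on a match the $set is applied; on a miss the key fields seed the new document
screenshots.update_one(key, {'$set': {'filename': 'design.png'}}, upsert=True)

diff --git a/images/1010586680291500263.png b/images/1010586680291500263.png new file mode 100644 index 0000000..25ff5ce Binary files /dev/null and b/images/1010586680291500263.png differ diff --git a/images/1010587040141811762.png b/images/1010587040141811762.png new file mode 100644 index 0000000..0cbf0d0 Binary files /dev/null and b/images/1010587040141811762.png differ diff --git a/images/1010588276689752195.png b/images/1010588276689752195.png new file mode 100644 index 0000000..c80955e Binary files /dev/null and b/images/1010588276689752195.png differ diff --git a/images/1010588388556021810.jpg b/images/1010588388556021810.jpg new file mode 100644 index 0000000..12f2e08 Binary files /dev/null and b/images/1010588388556021810.jpg differ diff --git a/images/1010588755385663518.png b/images/1010588755385663518.png new file mode 100644 index 0000000..7f37d04 Binary files /dev/null and b/images/1010588755385663518.png differ diff --git a/images/1010589789147381850.png b/images/1010589789147381850.png new file mode 100644 index 0000000..df8793d Binary files /dev/null and b/images/1010589836933083186.jpg b/images/1010589836933083186.jpg new file mode 100644 index 0000000..b515269 Binary files /dev/null and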
b/images/1010589836933083186.jpg differ diff --git a/images/1010590092458475640.jpg b/images/1010590092458475640.jpg new file mode 100644 index 0000000..25783ee Binary files /dev/null and b/images/1010590092458475640.jpg differ diff --git a/images/1010590421577109545.jpg b/images/1010590421577109545.jpg new file mode 100644 index 0000000..b8692e9 Binary files /dev/null and b/images/1010590421577109545.jpg differ diff --git a/images/1010590999665442837.jpg b/images/1010590999665442837.jpg new file mode 100644 index 0000000..75a600c Binary files /dev/null and b/images/1010590999665442837.jpg differ diff --git a/images/1010591448040734830.jpg b/images/1010591448040734830.jpg new file mode 100644 index 0000000..2b4911a Binary files /dev/null and b/images/1010591448040734830.jpg differ diff --git a/images/1010591828044697600.png b/images/1010591828044697600.png new file mode 100644 index 0000000..6571099 Binary files /dev/null and b/images/1010591828044697600.png differ diff --git a/images/1010592006923366521.jpg b/images/1010592006923366521.jpg new file mode 100644 index 0000000..63210d4 Binary files /dev/null and b/images/1010592006923366521.jpg differ diff --git a/images/1010592356468273232.png b/images/1010592356468273232.png new file mode 100644 index 0000000..929f721 Binary files /dev/null and b/images/1010592356468273232.png differ diff --git a/images/1010592558122008646.png b/images/1010592558122008646.png new file mode 100644 index 0000000..d602afe Binary files /dev/null and b/images/1010592558122008646.png differ diff --git a/images/1010592710979231804.png b/images/1010592710979231804.png new file mode 100644 index 0000000..80d1505 Binary files /dev/null and b/images/1010592710979231804.png differ diff --git a/images/1010592873453994004.png b/images/1010592873453994004.png new file mode 100644 index 0000000..78a7369 Binary files /dev/null and b/images/1010592873453994004.png differ diff --git a/images/1010592930894970951.png b/images/1010592930894970951.png new file mode 100644 index 0000000..71c0e2f Binary files /dev/null and b/images/1010592930894970951.png differ diff --git a/images/1010593130036347022.png b/images/1010593130036347022.png new file mode 100644 index 0000000..7364fd2 Binary files /dev/null and b/images/1010593130036347022.png differ diff --git a/images/1010593215482703902.png b/images/1010593215482703902.png new file mode 100644 index 0000000..a936970 Binary files /dev/null and b/images/1010593215482703902.png differ diff --git a/images/1010593457196257330.png b/images/1010593457196257330.png new file mode 100644 index 0000000..85b5581 Binary files /dev/null and b/images/1010593457196257330.png differ diff --git a/images/1010593589463634051.png b/images/1010593589463634051.png new file mode 100644 index 0000000..2906024 Binary files /dev/null and b/images/1010593589463634051.png differ diff --git a/images/1010593713304633384.png b/images/1010593713304633384.png new file mode 100644 index 0000000..2482803 Binary files /dev/null and b/images/1010593713304633384.png differ diff --git a/images/1010593852618440865.png b/images/1010593852618440865.png new file mode 100644 index 0000000..6d22c0c Binary files /dev/null and b/images/1010593852618440865.png differ diff --git a/images/1010593991433134151.png b/images/1010593991433134151.png new file mode 100644 index 0000000..56417d2 Binary files /dev/null and b/images/1010593991433134151.png differ diff --git a/images/1010594090217390120.png b/images/1010594090217390120.png new file mode 100644 index 0000000..6658628 Binary 
files /dev/null and b/images/1010594090217390120.png differ diff --git a/images/1010594802406006794.png b/images/1010594802406006794.png new file mode 100644 index 0000000..0a34aa7 Binary files /dev/null and b/images/1010594802406006794.png differ diff --git a/images/1010595011928281259.png b/images/1010595011928281259.png new file mode 100644 index 0000000..a51f9cc Binary files /dev/null and b/images/1010595011928281259.png differ diff --git a/images/1010595373724733450.png b/images/1010595373724733450.png new file mode 100644 index 0000000..4712ca4 Binary files /dev/null and b/images/1010595373724733450.png differ diff --git a/images/1010595559742115920.jpg b/images/1010595559742115920.jpg new file mode 100644 index 0000000..4fe7bc1 Binary files /dev/null and b/images/1010595559742115920.jpg differ diff --git a/images/1010598642668806144.png b/images/1010598642668806144.png new file mode 100644 index 0000000..6223a3d Binary files /dev/null and b/images/1010598642668806144.png differ diff --git a/images/1010599022165229568.png b/images/1010599022165229568.png new file mode 100644 index 0000000..802b7bb Binary files /dev/null and b/images/1010599022165229568.png differ diff --git a/images/1010599950033362984.png b/images/1010599950033362984.png new file mode 100644 index 0000000..a959200 Binary files /dev/null and b/images/1010599950033362984.png differ diff --git a/images/1010600727502147584.png b/images/1010600727502147584.png new file mode 100644 index 0000000..3305d41 Binary files /dev/null and b/images/1010600727502147584.png differ diff --git a/images/1010601590438232104.png b/images/1010601590438232104.png new file mode 100644 index 0000000..cf6fdab Binary files /dev/null and b/images/1010601590438232104.png differ diff --git a/images/1010603066946506772.png b/images/1010603066946506772.png new file mode 100644 index 0000000..8be3ace Binary files /dev/null and b/images/1010603066946506772.png differ diff --git a/images/1010604209231634472.png b/images/1010604209231634472.png new file mode 100644 index 0000000..250b8ff Binary files /dev/null and b/images/1010604209231634472.png differ diff --git a/images/1010609149459058758.png b/images/1010609149459058758.png new file mode 100644 index 0000000..7928507 Binary files /dev/null and b/images/1010609149459058758.png differ diff --git a/images/1010611947353419849.png b/images/1010611947353419849.png new file mode 100644 index 0000000..fd26a0c Binary files /dev/null and b/images/1010611947353419849.png differ diff --git a/images/1010612534946045992.png b/images/1010612534946045992.png new file mode 100644 index 0000000..5ee9472 Binary files /dev/null and b/images/1010612534946045992.png differ diff --git a/images/1010612938102542486.png b/images/1010612938102542486.png new file mode 100644 index 0000000..5ee9472 Binary files /dev/null and b/images/1010612938102542486.png differ diff --git a/images/1010620568304898129.png b/images/1010620568304898129.png new file mode 100644 index 0000000..4df1713 Binary files /dev/null and b/images/1010620568304898129.png differ diff --git a/images/1010620696390537236.png b/images/1010620696390537236.png new file mode 100644 index 0000000..e467bd8 Binary files /dev/null and b/images/1010620696390537236.png differ diff --git a/images/1010631750105432164.png b/images/1010631750105432164.png new file mode 100644 index 0000000..f54fe1a Binary files /dev/null and b/images/1010631750105432164.png differ diff --git a/images/1010645515798597752.png b/images/1010645515798597752.png new file mode 100644 index 
0000000..a2740ca Binary files /dev/null and b/images/1010645515798597752.png differ diff --git a/images/1010652989427564584.png b/images/1010652989427564584.png new file mode 100644 index 0000000..c5a6ebd Binary files /dev/null and b/images/1010652989427564584.png differ diff --git a/images/1010660399798042624.png b/images/1010660399798042624.png new file mode 100644 index 0000000..4a68c8a Binary files /dev/null and b/images/1010660399798042624.png differ diff --git a/images/1010660514969419937.png b/images/1010660514969419937.png new file mode 100644 index 0000000..f311b6f Binary files /dev/null and b/images/1010660514969419937.png differ diff --git a/images/1010660899767455854.png b/images/1010660899767455854.png new file mode 100644 index 0000000..e7bb4f0 Binary files /dev/null and b/images/1010660899767455854.png differ diff --git a/images/1010661077937303573.png b/images/1010661077937303573.png new file mode 100644 index 0000000..c17fc64 Binary files /dev/null and b/images/1010661077937303573.png differ diff --git a/images/1010806342098886676.png b/images/1010806342098886676.png new file mode 100644 index 0000000..54a766f Binary files /dev/null and b/images/1010806342098886676.png differ diff --git a/images/1010839153375727666.png b/images/1010839153375727666.png new file mode 100644 index 0000000..7a633d3 Binary files /dev/null and b/images/1010839153375727666.png differ diff --git a/images/1010839192676352030.png b/images/1010839192676352030.png new file mode 100644 index 0000000..7a633d3 Binary files /dev/null and b/images/1010839192676352030.png differ diff --git a/images/1010840414082830386.png b/images/1010840414082830386.png new file mode 100644 index 0000000..e1a168b Binary files /dev/null and b/images/1010840414082830386.png differ diff --git a/images/1010867516257603715.png b/images/1010867516257603715.png new file mode 100644 index 0000000..4346a02 Binary files /dev/null and b/images/1010867516257603715.png differ diff --git a/images/1010867795866697789.png b/images/1010867795866697789.png new file mode 100644 index 0000000..9fe5e55 Binary files /dev/null and b/images/1010867795866697789.png differ diff --git a/images/1010883151943434270.png b/images/1010883151943434270.png new file mode 100644 index 0000000..775bd10 Binary files /dev/null and b/images/1010883151943434270.png differ diff --git a/images/1010883260282314752.png b/images/1010883260282314752.png new file mode 100644 index 0000000..802f140 Binary files /dev/null and b/images/1010883260282314752.png differ diff --git a/images/1010997671386943539.png b/images/1010997671386943539.png new file mode 100644 index 0000000..2405fa1 Binary files /dev/null and b/images/1010997671386943539.png differ diff --git a/images/1010997977466290378.png b/images/1010997977466290378.png new file mode 100644 index 0000000..d9de05b Binary files /dev/null and b/images/1010997977466290378.png differ diff --git a/images/1010998259289968830.png b/images/1010998259289968830.png new file mode 100644 index 0000000..6e735e2 Binary files /dev/null and b/images/1010998259289968830.png differ diff --git a/images/1010998428035186718.png b/images/1010998428035186718.png new file mode 100644 index 0000000..5abf605 Binary files /dev/null and b/images/1010998428035186718.png differ diff --git a/images/1010998553180639252.png b/images/1010998553180639252.png new file mode 100644 index 0000000..85740b0 Binary files /dev/null and b/images/1010998553180639252.png differ diff --git a/images/1010998808693456977.png b/images/1010998808693456977.png new file 
mode 100644 index 0000000..d09ba2a Binary files /dev/null and b/images/1010998808693456977.png differ diff --git a/images/1010998984984240188.png b/images/1010998984984240188.png new file mode 100644 index 0000000..871b62b Binary files /dev/null and b/images/1010998984984240188.png differ diff --git a/images/1010999170179539125.png b/images/1010999170179539125.png new file mode 100644 index 0000000..44eee38 Binary files /dev/null and b/images/1010999170179539125.png differ diff --git a/images/1010999574254600202.png b/images/1010999574254600202.png new file mode 100644 index 0000000..f8ce23f Binary files /dev/null and b/images/1010999574254600202.png differ diff --git a/images/1010999642336526367.png b/images/1010999642336526367.png new file mode 100644 index 0000000..9c4bc69 Binary files /dev/null and b/images/1010999642336526367.png differ diff --git a/images/1011005499359105034.png b/images/1011005499359105034.png new file mode 100644 index 0000000..c9cddc1 Binary files /dev/null and b/images/1011005499359105034.png differ diff --git a/images/1011014560225964122.png b/images/1011014560225964122.png new file mode 100644 index 0000000..3ee31e8 Binary files /dev/null and b/images/1011014560225964122.png differ diff --git a/images/1011014653733785760.png b/images/1011014653733785760.png new file mode 100644 index 0000000..3dbc66d Binary files /dev/null and b/images/1011014653733785760.png differ diff --git a/images/1011181657065259049.png b/images/1011181657065259049.png new file mode 100644 index 0000000..53371f1 Binary files /dev/null and b/images/1011181657065259049.png differ diff --git a/images/1011337038383042720.png b/images/1011337038383042720.png new file mode 100644 index 0000000..0955eda Binary files /dev/null and b/images/1011337038383042720.png differ diff --git a/images/1011337331883643090.png b/images/1011337331883643090.png new file mode 100644 index 0000000..eefd3fe Binary files /dev/null and b/images/1011337331883643090.png differ diff --git a/images/1011337851448872990.png b/images/1011337851448872990.png new file mode 100644 index 0000000..5ce7da1 Binary files /dev/null and b/images/1011337851448872990.png differ diff --git a/images/1011338994031796344.png b/images/1011338994031796344.png new file mode 100644 index 0000000..ec8d930 Binary files /dev/null and b/images/1011338994031796344.png differ diff --git a/images/1011402540736319498.png b/images/1011402540736319498.png new file mode 100644 index 0000000..237b2e5 Binary files /dev/null and b/images/1011402540736319498.png differ diff --git a/images/1012116519414677514.jpg b/images/1012116519414677514.jpg new file mode 100644 index 0000000..d1b6e60 Binary files /dev/null and b/images/1012116519414677514.jpg differ diff --git a/images/1012885024795787384.png b/images/1012885024795787384.png new file mode 100644 index 0000000..e389e12 Binary files /dev/null and b/images/1012885024795787384.png differ diff --git a/images/1012885274642096208.png b/images/1012885274642096208.png new file mode 100644 index 0000000..7272744 Binary files /dev/null and b/images/1012885274642096208.png differ diff --git a/images/1012885423174987827.png b/images/1012885423174987827.png new file mode 100644 index 0000000..e11ccd7 Binary files /dev/null and b/images/1012885423174987827.png differ diff --git a/images/1012885634236563466.png b/images/1012885634236563466.png new file mode 100644 index 0000000..4566148 Binary files /dev/null and b/images/1012885634236563466.png differ diff --git a/images/1013726239242080296.png 
b/images/1013726239242080296.png new file mode 100644 index 0000000..ceb2f7f Binary files /dev/null and b/images/1013726239242080296.png differ diff --git a/images/1014070726866780160.png b/images/1014070726866780160.png new file mode 100644 index 0000000..ce633c2 Binary files /dev/null and b/images/1014070726866780160.png differ diff --git a/images/1014632523407573062.png b/images/1014632523407573062.png new file mode 100644 index 0000000..7f368f0 Binary files /dev/null and b/images/1014632523407573062.png differ diff --git a/images/1014681903158284298.png b/images/1014681903158284298.png new file mode 100644 index 0000000..8623e5c Binary files /dev/null and b/images/1014681903158284298.png differ diff --git a/images/1015178580327677952.png b/images/1015178580327677952.png new file mode 100644 index 0000000..d0e291f Binary files /dev/null and b/images/1015178580327677952.png differ diff --git a/images/1015199888847413288.png b/images/1015199888847413288.png new file mode 100644 index 0000000..4a6ac79 Binary files /dev/null and b/images/1015199888847413288.png differ diff --git a/images/1015200360857604126.png b/images/1015200360857604126.png new file mode 100644 index 0000000..9749892 Binary files /dev/null and b/images/1015200360857604126.png differ diff --git a/images/1015200604827684874.png b/images/1015200604827684874.png new file mode 100644 index 0000000..5d34d46 Binary files /dev/null and b/images/1015200604827684874.png differ diff --git a/images/1015200680388071444.png b/images/1015200680388071444.png new file mode 100644 index 0000000..e3b6110 Binary files /dev/null and b/images/1015200680388071444.png differ diff --git a/images/1015201049990148096.png b/images/1015201049990148096.png new file mode 100644 index 0000000..6361e94 Binary files /dev/null and b/images/1015201049990148096.png differ diff --git a/images/1015201224544489482.png b/images/1015201224544489482.png new file mode 100644 index 0000000..4fcbd48 Binary files /dev/null and b/images/1015201224544489482.png differ diff --git a/images/1015201313186926592.png b/images/1015201313186926592.png new file mode 100644 index 0000000..d8fc474 Binary files /dev/null and b/images/1015201313186926592.png differ diff --git a/images/1015215106721841182.png b/images/1015215106721841182.png new file mode 100644 index 0000000..a14068e Binary files /dev/null and b/images/1015215106721841182.png differ diff --git a/images/1015692190590181416.png b/images/1015692190590181416.png new file mode 100644 index 0000000..b22a8f2 Binary files /dev/null and b/images/1015692190590181416.png differ diff --git a/images/1016466574313521182.png b/images/1016466574313521182.png new file mode 100644 index 0000000..5ad3bd0 Binary files /dev/null and b/images/1016466574313521182.png differ diff --git a/images/1017064303431528478.png b/images/1017064303431528478.png new file mode 100644 index 0000000..cc17df6 Binary files /dev/null and b/images/1017064303431528478.png differ diff --git a/images/1017404088473895003.png b/images/1017404088473895003.png new file mode 100644 index 0000000..fd4c753 Binary files /dev/null and b/images/1017404088473895003.png differ diff --git a/images/1017404219185172511.png b/images/1017404219185172511.png new file mode 100644 index 0000000..1fd13e4 Binary files /dev/null and b/images/1017404219185172511.png differ diff --git a/images/1017404308414812190.png b/images/1017404308414812190.png new file mode 100644 index 0000000..dab216d Binary files /dev/null and b/images/1017404308414812190.png differ diff --git 
a/images/1017404487138295818.png b/images/1017404487138295818.png new file mode 100644 index 0000000..7ed0c33 Binary files /dev/null and b/images/1017404487138295818.png differ diff --git a/images/1019244889071947776.jpg b/images/1019244889071947776.jpg new file mode 100644 index 0000000..60de802 Binary files /dev/null and b/images/1019244889071947776.jpg differ diff --git a/images/1022076524184219688.png b/images/1022076524184219688.png new file mode 100644 index 0000000..98227a5 Binary files /dev/null and b/images/1022076524184219688.png differ diff --git a/images/1022076524725280798.png b/images/1022076524725280798.png new file mode 100644 index 0000000..fe88b46 Binary files /dev/null and b/images/1022076524725280798.png differ diff --git a/images/1022076525178257408.png b/images/1022076525178257408.png new file mode 100644 index 0000000..01ad8bf Binary files /dev/null and b/images/1022076525178257408.png differ diff --git a/images/1022076525538988102.png b/images/1022076525538988102.png new file mode 100644 index 0000000..014506d Binary files /dev/null and b/images/1022076525538988102.png differ diff --git a/images/1022076525887098920.png b/images/1022076525887098920.png new file mode 100644 index 0000000..1dfc7c0 Binary files /dev/null and b/images/1022076525887098920.png differ diff --git a/images/1022169991795187803.png b/images/1022169991795187803.png new file mode 100644 index 0000000..4203733 Binary files /dev/null and b/images/1022169991795187803.png differ diff --git a/images/1024028159609413662.jpg b/images/1024028159609413662.jpg new file mode 100644 index 0000000..af84442 Binary files /dev/null and b/images/1024028159609413662.jpg differ diff --git a/images/1024028221475410050.jpg b/images/1024028221475410050.jpg new file mode 100644 index 0000000..ae3fc91 Binary files /dev/null and b/images/1024028221475410050.jpg differ diff --git a/images/1024028342753693886.jpg b/images/1024028342753693886.jpg new file mode 100644 index 0000000..4a941fa Binary files /dev/null and b/images/1024028342753693886.jpg differ diff --git a/images/1024028363528081478.png b/images/1024028363528081478.png new file mode 100644 index 0000000..3de82eb Binary files /dev/null and b/images/1024028363528081478.png differ diff --git a/images/1024028367944679494.jpg b/images/1024028367944679494.jpg new file mode 100644 index 0000000..a271dac Binary files /dev/null and b/images/1024028367944679494.jpg differ diff --git a/images/1024028560383557642.png b/images/1024028560383557642.png new file mode 100644 index 0000000..00e1c16 Binary files /dev/null and b/images/1024028560383557642.png differ diff --git a/images/1024716750949978122.png b/images/1024716750949978122.png new file mode 100644 index 0000000..5c705fa Binary files /dev/null and b/images/1024716750949978122.png differ diff --git a/images/1026052704281301062.png b/images/1026052704281301062.png new file mode 100644 index 0000000..5a3a9ba Binary files /dev/null and b/images/1026052704281301062.png differ diff --git a/images/1027897671261896707.png b/images/1027897671261896707.png new file mode 100644 index 0000000..32b41de Binary files /dev/null and b/images/1027897671261896707.png differ diff --git a/images/1029905922467835935.png b/images/1029905922467835935.png new file mode 100644 index 0000000..be75016 Binary files /dev/null and b/images/1029905922467835935.png differ diff --git a/info.log b/info.log new file mode 100644 index 0000000..eac1d9f --- /dev/null +++ b/info.log @@ -0,0 +1,8151 @@ +2022-12-31 00:06:21.484 | INFO | 
__main__:load_extensions:61 - Cog loaded: cogs.calculate_time +2022-12-31 00:06:21.490 | INFO | __main__:load_extensions:61 - Cog loaded: cogs.check_player +2022-12-31 00:06:21.636 | INFO | __main__:load_extensions:61 - Cog loaded: cogs.setup +2022-12-31 00:06:21.637 | INFO | __main__:main:75 - Starting bot +2022-12-31 00:07:01.291 | INFO | __main__:load_extensions:61 - Cog loaded: cogs.calculate_time +2022-12-31 00:07:01.293 | INFO | __main__:load_extensions:61 - Cog loaded: cogs.check_player +2022-12-31 00:07:01.368 | INFO | __main__:load_extensions:61 - Cog loaded: cogs.setup +2022-12-31 00:07:01.370 | INFO | __main__:main:75 - Starting bot +2022-12-31 00:07:04.670 | INFO | __main__:on_ready:46 - Ready +2022-12-31 00:07:55.332 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:07:55.333 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:07:55.387 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:07:55.388 | INFO | __main__:main:76 - Starting bot +2022-12-31 00:07:58.306 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:08:14.816 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 00:10:55.275 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:10:55.278 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:10:55.335 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:10:55.337 | INFO | __main__:main:76 - Starting bot +2022-12-31 00:10:58.136 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:11:13.632 | INFO | cogs.calculate_time:calculate_time:18 - Strix Issued calculate time command +2022-12-31 00:11:14.866 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 00:39:13.980 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:39:13.987 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:39:13.988 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:39:14.077 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:39:14.078 | INFO | __main__:main:76 - Starting bot +2022-12-31 00:39:16.913 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:39:34.702 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 00:49:26.186 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:49:26.189 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:49:26.191 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:49:26.248 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:49:26.250 | INFO | __main__:main:95 - Starting bot +2022-12-31 00:49:29.074 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:49:43.872 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 00:49:57.431 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:49:57.433 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:49:57.435 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:49:57.493 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:49:57.494 | INFO | __main__:main:95 - Starting bot +2022-12-31 00:50:00.288 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:50:15.710 | INFO | __main__:reload:81 - Cog unloaded: cogs.calculate_time +2022-12-31 
00:50:15.711 | INFO | __main__:reload:81 - Cog unloaded: cogs.check_house +2022-12-31 00:50:15.712 | INFO | __main__:reload:81 - Cog unloaded: cogs.check_player +2022-12-31 00:50:15.713 | INFO | __main__:reload:81 - Cog unloaded: cogs.setup +2022-12-31 00:50:15.715 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:50:15.716 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:50:15.718 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:50:15.723 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:50:26.008 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 00:50:49.685 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:50:49.687 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:50:49.689 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:50:49.743 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:50:49.744 | INFO | __main__:main:96 - Starting bot +2022-12-31 00:50:52.558 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:51:09.792 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 00:53:57.372 | INFO | __main__:reload:81 - Cog unloaded: cogs.calculate_time +2022-12-31 00:53:57.374 | INFO | __main__:reload:81 - Cog unloaded: cogs.check_house +2022-12-31 00:53:57.374 | INFO | __main__:reload:81 - Cog unloaded: cogs.check_player +2022-12-31 00:53:57.375 | INFO | __main__:reload:81 - Cog unloaded: cogs.setup +2022-12-31 00:53:57.377 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:53:57.379 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:53:57.381 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:53:57.386 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:54:30.160 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:54:30.161 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:54:30.163 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:54:30.213 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:54:30.215 | INFO | __main__:main:96 - Starting bot +2022-12-31 00:54:33.072 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:54:49.433 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 00:54:55.902 | INFO | __main__:reload:81 - Cog unloaded: cogs.calculate_time +2022-12-31 00:54:55.904 | INFO | __main__:reload:81 - Cog unloaded: cogs.check_house +2022-12-31 00:54:55.905 | INFO | __main__:reload:81 - Cog unloaded: cogs.check_player +2022-12-31 00:54:55.906 | INFO | __main__:reload:81 - Cog unloaded: cogs.setup +2022-12-31 00:54:55.908 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:54:55.910 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:54:55.912 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 00:54:55.919 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:59:35.009 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 00:59:35.011 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 00:59:35.012 | INFO | __main__:load_extensions:62 - Cog loaded: 
cogs.check_player +2022-12-31 00:59:35.065 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 00:59:35.067 | INFO | __main__:main:97 - Starting bot +2022-12-31 00:59:38.031 | INFO | __main__:on_ready:47 - Ready +2022-12-31 00:59:56.894 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 01:01:28.743 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 01:01:28.745 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 01:01:28.746 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 01:01:28.812 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 01:01:28.813 | INFO | __main__:main:103 - Starting bot +2022-12-31 01:01:31.744 | INFO | __main__:on_ready:47 - Ready +2022-12-31 01:01:51.179 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 01:04:10.420 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 01:04:10.422 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 01:04:10.424 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 01:04:10.477 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 01:04:10.478 | INFO | __main__:main:103 - Starting bot +2022-12-31 01:04:13.364 | INFO | __main__:on_ready:47 - Ready +2022-12-31 01:04:40.806 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 01:05:18.754 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 01:05:18.757 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 01:05:18.759 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 01:05:18.814 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 01:05:18.815 | INFO | __main__:main:103 - Starting bot +2022-12-31 01:05:21.628 | INFO | __main__:on_ready:47 - Ready +2022-12-31 01:05:38.317 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 01:05:38.319 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 01:05:38.320 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_player +2022-12-31 01:05:38.376 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 01:05:38.377 | INFO | __main__:main:103 - Starting bot +2022-12-31 01:05:41.244 | INFO | __main__:on_ready:47 - Ready +2022-12-31 01:05:55.987 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 01:06:01.224 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 01:23:16.595 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 01:23:28.475 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 01:24:37.123 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 01:24:50.335 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 01:58:12.790 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 01:58:20.014 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 01:59:10.631 | INFO | __main__:reload:82 - Cog reloaded: cogs.calculate_time +2022-12-31 01:59:10.634 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_house +2022-12-31 01:59:10.637 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_player +2022-12-31 01:59:10.643 | INFO | __main__:reload:82 - Cog reloaded: cogs.setup +2022-12-31 
01:59:19.128 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 01:59:38.455 | INFO | __main__:reload:82 - Cog reloaded: cogs.calculate_time +2022-12-31 01:59:38.458 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_house +2022-12-31 01:59:38.461 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_player +2022-12-31 01:59:38.466 | INFO | __main__:reload:82 - Cog reloaded: cogs.setup +2022-12-31 01:59:50.852 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 02:00:17.495 | INFO | __main__:reload:82 - Cog reloaded: cogs.calculate_time +2022-12-31 02:00:17.498 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_house +2022-12-31 02:00:17.504 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_player +2022-12-31 02:00:17.511 | INFO | __main__:reload:82 - Cog reloaded: cogs.setup +2022-12-31 02:00:23.854 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 02:05:09.401 | INFO | __main__:reload:82 - Cog reloaded: cogs.calculate_time +2022-12-31 02:05:09.403 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_house +2022-12-31 02:05:09.405 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_player +2022-12-31 02:05:09.410 | INFO | __main__:reload:82 - Cog reloaded: cogs.setup +2022-12-31 02:05:17.656 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 02:07:44.584 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:07:57.612 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 1 +2022-12-31 02:24:49.035 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:24:57.339 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 1 +2022-12-31 02:26:14.931 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:26:19.999 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 1 +2022-12-31 02:26:39.260 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:26:45.680 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 1 +2022-12-31 02:27:54.547 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:28:06.527 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 02:34:03.210 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:34:10.196 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 13 +2022-12-31 02:38:09.142 | INFO | cogs.check_house:check_house:14 - StronkStrix issued check_house on 13 +2022-12-31 02:51:05.762 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:51:13.650 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 2 +2022-12-31 02:51:48.324 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 02:51:54.843 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 9 +2022-12-31 02:53:07.409 | INFO | __main__:reload:82 - Cog reloaded: cogs.calculate_time +2022-12-31 02:53:07.412 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_house +2022-12-31 02:53:07.415 | INFO | __main__:reload:82 - Cog reloaded: cogs.check_player +2022-12-31 02:53:07.422 | INFO | __main__:reload:82 - Cog reloaded: cogs.setup +2022-12-31 02:53:13.627 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 2 +2022-12-31 03:06:14.318 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 03:23:55.210 | INFO | 
__main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 03:24:02.044 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 1 +2022-12-31 03:25:07.223 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 03:25:19.233 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 03:51:54.466 | INFO | cogs.check_house:check_house:14 - Strix issued check_house on 15 +2022-12-31 03:57:38.031 | INFO | __main__:reload:88 - Cog reloaded: cogs.check_house +2022-12-31 03:58:58.080 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.calculate_time +2022-12-31 03:58:58.083 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.check_house +2022-12-31 03:58:58.213 | INFO | __main__:load_extensions:62 - Cog loaded: cogs.setup +2022-12-31 03:58:58.215 | INFO | __main__:main:103 - Starting bot +2022-12-31 03:59:01.084 | INFO | __main__:on_ready:47 - Ready +2022-12-31 03:59:17.775 | INFO | __main__:getHouseData:43 - Updated House Data +2022-12-31 05:31:46.609 | INFO | __main__:load_extensions:47 - Cog loaded: cogs.calculate_time +2022-12-31 05:31:46.610 | INFO | __main__:load_extensions:47 - Cog loaded: cogs.check_house +2022-12-31 05:33:30.089 | INFO | __main__:load_extensions:47 - Cog loaded: cogs.calculate_time +2022-12-31 05:33:30.090 | INFO | __main__:load_extensions:47 - Cog loaded: cogs.check_house +2022-12-31 05:33:30.144 | INFO | __main__:load_extensions:47 - Cog loaded: cogs.setup +2022-12-31 05:33:30.145 | INFO | __main__:main:88 - Starting bot +2022-12-31 05:33:33.109 | INFO | __main__:on_ready:32 - Ready +2022-12-31 05:34:49.532 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.calculate_time +2022-12-31 05:34:49.534 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.check_house +2022-12-31 05:34:49.589 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.setup +2022-12-31 05:34:49.590 | INFO | __main__:main:86 - Starting bot +2022-12-31 05:34:52.610 | INFO | __main__:on_ready:32 - Ready +2022-12-31 05:36:44.940 | INFO | __main__:reload:65 - Cog reloaded: cogs.calculate_time +2022-12-31 05:36:44.943 | INFO | __main__:reload:65 - Cog reloaded: cogs.check_house +2022-12-31 05:40:35.730 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.calculate_time +2022-12-31 05:40:35.732 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.check_house +2022-12-31 05:40:35.745 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.check_player +2022-12-31 05:40:35.802 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.setup +2022-12-31 05:40:35.803 | INFO | __main__:main:99 - Starting bot +2022-12-31 05:40:35.813 | INFO | cogs.check_player:getHouseData:24 - Starting House Data Update +2022-12-31 05:40:38.794 | INFO | __main__:on_ready:32 - Ready +2022-12-31 05:42:04.508 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.calculate_time +2022-12-31 05:42:04.510 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.check_house +2022-12-31 05:42:04.512 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.check_player +2022-12-31 05:42:04.567 | INFO | __main__:load_extensions:45 - Cog loaded: cogs.setup +2022-12-31 05:42:04.569 | INFO | __main__:main:99 - Starting bot +2022-12-31 05:42:04.578 | INFO | cogs.check_player:getHouseData:24 - Starting House Data Update +2022-12-31 05:42:07.551 | INFO | __main__:on_ready:32 - Ready +2022-12-31 05:43:58.073 | INFO | cogs.check_player:getHouseData:38 - Updated House Data +2022-12-31 05:44:21.405 | INFO | cogs.check_player:getHouseData:24 - Starting House Data 
Update +2022-12-31 05:44:25.343 | INFO | cogs.check_player:getHouseData:24 - Starting House Data Update +2022-12-31 05:44:42.657 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.calculate_time +2022-12-31 05:44:42.658 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.check_house +2022-12-31 05:44:42.660 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.check_player +2022-12-31 05:44:42.713 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.setup +2022-12-31 05:44:42.714 | INFO | __main__:main:100 - Starting bot +2022-12-31 05:44:42.724 | INFO | cogs.check_player:getHouseData:24 - Starting House Data Update +2022-12-31 05:44:45.571 | INFO | __main__:on_ready:33 - Ready +2022-12-31 05:45:01.406 | INFO | cogs.check_player:getHouseData:38 - Updated House Data +2022-12-31 05:46:58.719 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.calculate_time +2022-12-31 05:46:58.721 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.check_house +2022-12-31 05:46:58.723 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.check_player +2022-12-31 05:46:58.784 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.setup +2022-12-31 05:46:58.786 | INFO | __main__:main:106 - Starting bot +2022-12-31 05:46:58.795 | INFO | cogs.check_player:getHouseData:24 - Starting House Data Update +2022-12-31 05:47:01.721 | INFO | __main__:on_ready:33 - Ready +2022-12-31 05:47:18.686 | INFO | cogs.check_player:getHouseData:38 - Updated House Data +2022-12-31 05:47:36.872 | INFO | cogs.check_player:getHouseData:25 - Starting House Data Update +2022-12-31 05:47:48.495 | ERROR | __main__:unload:89 - Unloaded: cogs.setup +NoneType: None +2022-12-31 05:47:54.434 | INFO | cogs.check_player:getHouseData:39 - Updated House Data +2022-12-31 05:48:05.311 | ERROR | __main__:load:79 - Loaded: cogs.setup +NoneType: None +2022-12-31 05:49:20.395 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.calculate_time +2022-12-31 05:49:20.397 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.check_house +2022-12-31 05:49:20.399 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.check_player +2022-12-31 05:49:20.457 | INFO | __main__:load_extensions:46 - Cog loaded: cogs.setup +2022-12-31 05:49:20.458 | INFO | __main__:main:106 - Starting bot +2022-12-31 05:49:23.395 | INFO | cogs.check_player:getHouseData:25 - Starting House Data Update +2022-12-31 05:49:23.407 | INFO | __main__:on_ready:33 - Ready +2022-12-31 05:49:33.140 | ERROR | __main__:unload:92 - Failed:['Traceback (most recent call last):\n', ' File "c:\\Users\\Strix\\Dev\\stormbrigade_sheriff\\main.py", line 88, in unload\n bot.unload_extension(f"cogs.{cog}")\n', ' File "C:\\Users\\Strix\\Dev\\stormbrigade_sheriff\\sbsheriff\\lib\\site-packages\\disnake\\ext\\commands\\common_bot_base.py", line 512, in unload_extension\n raise errors.ExtensionNotLoaded(name)\n', "disnake.ext.commands.errors.ExtensionNotLoaded: Extension 'cogs.kek' has not been loaded.\n"]

+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 111, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\client.py", line 700, in _run_event
+    await coro(*args, **kwargs)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\interaction_bot_base.py", line 1361, in on_application_command
+    await self.process_application_commands(interaction)  # self = <__main__.MyBot object at 0x000001CD112A1960>
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\interaction_bot_base.py", line 1353, in process_application_commands
+    await app_command.invoke(interaction)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\slash_core.py", line 730, in invoke
+    await call_param_func(self.callback, inter, self.cog, **kwargs)  # kwargs = {'cog': 'kek'}
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\params.py", line 1022, in call_param_func
+    return await maybe_coroutine(safe_call, function, **kwargs)  # kwargs = {'cog': 'kek', 'self': None, 'inter': ...}
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\utils.py", line 599, in maybe_coroutine
+    return await value
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 88, in unload
+    bot.unload_extension(f"cogs.{cog}")  # bot = <__main__.MyBot object at 0x000001CD112A1960>
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\common_bot_base.py", line 512, in unload_extension
+    raise errors.ExtensionNotLoaded(name)  # name = 'cogs.kek'
+disnake.ext.commands.errors.ExtensionNotLoaded: Extension 'cogs.kek' has not been loaded.

+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\ban_checker.py", line 21, in setup
+    bot.add_cog(Parish_Intel(bot))  # bot = <__main__.MyBot object at 0x00000215CB4A3010>
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\common_bot_base.py", line 300, in add_cog
+    raise disnake.ClientException(f"Cog named {cog_name!r} already loaded")
+disnake.errors.ClientException: Cog named 'Parish_Intel' already loaded
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 113, in <module>
+    if __name__ == "__main__":  # __name__ = '__main__'
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\client.py", line 700, in _run_event
+    await coro(*args, **kwargs)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\interaction_bot_base.py", line 1361, in on_application_command
+    await self.process_application_commands(interaction)  # self = <__main__.MyBot object at 0x00000215CB4A3010>
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\interaction_bot_base.py", line 1353, in process_application_commands
+    await app_command.invoke(interaction)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\slash_core.py", line 730, in invoke
+    await call_param_func(self.callback, inter, self.cog, **kwargs)  # kwargs = {'cog': 'ban_checker'}
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\params.py", line 1022, in call_param_func
+    return await maybe_coroutine(safe_call, function, **kwargs)  # kwargs = {'cog': 'ban_checker', 'self': None, 'inter': ...}
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\utils.py", line 599, in maybe_coroutine
+    return await value
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 80, in load
+    bot.load_extension(f"cogs.{cog}")  # bot = <__main__.MyBot object at 0x00000215CB4A3010>
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\common_bot_base.py", line 475, in load_extension
+    self._load_from_module_spec(spec, name)  # name = 'cogs.ban_checker', spec = ModuleSpec(name='cogs.ban_checker', loader=<_frozen_importlib_external.SourceFileLoader object at 0x00000215CC91F340>, origin...
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\common_bot_base.py", line 421, in _load_from_module_spec
+    raise errors.ExtensionFailed(key, e) from e  # key = 'cogs.ban_checker'

+Failed:['Traceback (most recent call last):\n', ' File "<frozen importlib._bootstrap_external>", line 883, in exec_module\n', ' File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed\n', ' File "c:\\Users\\Strix\\Dev\\stormbrigade_sheriff\\cogs\\player_intel.py", line 12, in <module>\n from embed_factory import castle_design_confirmation\n', ' File "c:\\Users\\Strix\\Dev\\stormbrigade_sheriff\\embed_factory\\castle_design_confirmation.py", line 2, in <module>\n import embeds\n', "ModuleNotFoundError: No module named 'embeds'\n", '\nThe above exception was the direct cause of the following exception:\n\n', 'Traceback (most recent call last):\n', ' File "c:\\Users\\Strix\\Dev\\stormbrigade_sheriff\\main.py", line 77, in load\n bot.load_extension(f"cogs.{cog}")\n', ' File "C:\\Users\\Strix\\Dev\\stormbrigade_sheriff\\sbsheriff\\lib\\site-packages\\disnake\\ext\\commands\\common_bot_base.py", line 475, in load_extension\n self._load_from_module_spec(spec, name)\n', ' File "C:\\Users\\Strix\\Dev\\stormbrigade_sheriff\\sbsheriff\\lib\\site-packages\\disnake\\ext\\commands\\common_bot_base.py", line 407, in _load_from_module_spec\n raise errors.ExtensionFailed(key, e) from e\n', "disnake.ext.commands.errors.ExtensionFailed: Extension 'cogs.player_intel' raised an error: ModuleNotFoundError: No module named 'embeds'\n"]

+Traceback (most recent call last):
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\common_bot_base.py", line 404, in _load_from_module_spec
+    spec.loader.exec_module(lib)  # type: ignore  # spec = ModuleSpec(name='cogs.player_intel', loader=<_frozen_importlib_external.SourceFileLoader object at 0x000001E9FB9CAD40>, origi...
+  File "<frozen importlib._bootstrap_external>", line 883, in exec_module
+  File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\player_intel.py", line 12, in <module>
+    from embed_factory import castle_design_confirmation
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\embed_factory\castle_design_confirmation.py", line 2, in <module>
+    import embeds
+ModuleNotFoundError: No module named 'embeds'
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 108, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\client.py", line 700, in _run_event
+    await coro(*args, **kwargs)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\interaction_bot_base.py", line 1361, in on_application_command
+    await self.process_application_commands(interaction)  # self = <__main__.MyBot object at 0x000001E9FA42DEA0>
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\interaction_bot_base.py", line 1353, in process_application_commands
+    await app_command.invoke(interaction)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\slash_core.py", line 730, in invoke
+    await call_param_func(self.callback, inter, self.cog, **kwargs)  # kwargs = {'cog': 'player_intel'}
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\params.py", line 1022, in call_param_func
+    return await maybe_coroutine(safe_call, function, **kwargs)  # kwargs = {'cog': 'player_intel', 'self': None, 'inter': ...}
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\utils.py", line 599, in maybe_coroutine
+    return await value
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 77, in load
+    bot.load_extension(f"cogs.{cog}")  # bot = <__main__.MyBot object at 0x000001E9FA42DEA0>
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\common_bot_base.py", line 475, in load_extension
+    self._load_from_module_spec(spec, name)  # name = 'cogs.player_intel', spec = ModuleSpec(name='cogs.player_intel', loader=<_frozen_importlib_external.SourceFileLoader object at 0x000001E9FB9CAD40>, origi...
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\commands\common_bot_base.py", line 407, in _load_from_module_spec
+    raise errors.ExtensionFailed(key, e) from e  # key = 'cogs.player_intel'
+disnake.ext.commands.errors.ExtensionFailed: Extension 'cogs.player_intel' raised an error: ModuleNotFoundError: No module named 'embeds'
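The failures above all funnel through the owner-only extension-management slash commands in main.py (the frames flagged with > sit in load and unload): unloading a cog that was never loaded raises ExtensionNotLoaded, while loading re-raises whatever the cog's setup() hits as ExtensionFailed (a duplicate add_cog in one case, a missing module in the other). A minimal sketch of what such a command plausibly looks like, assuming disnake's InteractionBot and loguru; the f"cogs.{cog}" pattern and the "Cog loaded:" / "Failed:[...]" messages are taken from the log, every other name is an assumption, and unload/reload would follow the same shape:

import traceback

import disnake
from disnake.ext import commands
from loguru import logger

bot = commands.InteractionBot()

@bot.slash_command(description="Owner only: load a cog by name")
async def load(inter: disnake.ApplicationCommandInteraction, cog: str):
    try:
        bot.load_extension(f"cogs.{cog}")
    except commands.errors.ExtensionError as e:
        # ExtensionError covers ExtensionNotLoaded, ExtensionAlreadyLoaded and
        # ExtensionFailed; traceback.format_exception(e) yields the list of
        # strings that loguru prints as "Failed:[...]" in the entries above
        logger.error(f"Failed:{traceback.format_exception(e)}")
        await inter.send(f"Load failed: {e}", ephemeral=True)
    else:
        logger.info(f"Cog loaded: cogs.{cog}")
        await inter.send(f"Loaded cogs.{cog}", ephemeral=True)

Catching ExtensionError at the command level is presumably what produces the compact Failed:[...] ERROR entries; the uncaught variants are what crash through disnake's dispatcher and leave the full chained tracebacks above. The stray "NoneType: None" lines after the Unloaded/Loaded ERROR entries are loguru's exception formatter being invoked (logger.exception, or logger.opt(exception=True)) while no exception is active.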
Starting VM advert updates +2023-06-26 04:25:06.398 | INFO | cogs.vm_tracker:setup:97 - cogs.vm_tracker Loaded +2023-06-26 04:25:06.398 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:26:45.338 | INFO | cogs.vm_tracker:setup:97 - cogs.vm_tracker Loaded +2023-06-26 04:26:45.344 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:27:09.610 | INFO | cogs.vm_tracker:setup:98 - cogs.vm_tracker Loaded +2023-06-26 04:27:09.611 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:27:10.644 | INFO | cogs.vm_tracker:setup:98 - cogs.vm_tracker Loaded +2023-06-26 04:27:10.645 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:27:41.963 | INFO | cogs.vm_tracker:setup:98 - cogs.vm_tracker Loaded +2023-06-26 04:27:41.965 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:30:19.668 | INFO | cogs.vm_tracker:setup:97 - cogs.vm_tracker Loaded +2023-06-26 04:30:19.670 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:31:24.264 | INFO | cogs.vm_tracker:setup:98 - cogs.vm_tracker Loaded +2023-06-26 04:31:24.265 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:31:37.426 | INFO | cogs.vm_tracker:setup:99 - cogs.vm_tracker Loaded +2023-06-26 04:31:37.427 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:31:57.649 | INFO | cogs.vm_tracker:setup:99 - cogs.vm_tracker Loaded +2023-06-26 04:31:57.651 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:32:13.813 | INFO | cogs.vm_tracker:setup:99 - cogs.vm_tracker Loaded +2023-06-26 04:32:13.814 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:35:05.550 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 04:35:05.552 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 04:35:05.554 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 04:35:05.555 | INFO | cogs.vm_tracker:setup:99 - cogs.vm_tracker Loaded +2023-06-26 04:35:05.557 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 04:35:05.558 | INFO | __main__:main:101 - Starting bot +2023-06-26 04:35:09.615 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 04:35:09.617 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:35:09.619 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 04:35:09.636 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 04:35:22.870 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 04:35:22.870 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 04:35:37.864 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 04:35:37.866 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 04:35:51.761 | INFO | cogs.vm_tracker:setup:89 - cogs.vm_tracker Loaded +2023-06-26 04:35:51.764 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:35:57.351 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 04:35:57.352 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 04:36:33.092 | INFO | cogs.vm_tracker:setup:99 - cogs.vm_tracker Loaded +2023-06-26 04:36:33.094 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:37:46.810 | INFO | cogs.vm_tracker:setup:97 - cogs.vm_tracker Loaded +2023-06-26 04:37:46.811 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:38:36.303 | INFO | cogs.vm_tracker:setup:97 - cogs.vm_tracker Loaded +2023-06-26 04:38:36.304 | INFO | cogs.vm_tracker:Update_VM_Embeds:19 - ---> Starting VM advert updates +2023-06-26 04:48:31.307 | INFO | cogs.vm_tracker:setup:102 - cogs.vm_tracker Loaded +2023-06-26 04:48:31.308 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 04:49:34.945 | INFO | cogs.vm_tracker:setup:102 - cogs.vm_tracker Loaded +2023-06-26 04:49:34.959 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 04:50:55.298 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 04:50:55.300 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 04:50:55.301 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 04:50:55.311 | INFO | cogs.vm_tracker:setup:102 - cogs.vm_tracker Loaded +2023-06-26 04:50:55.312 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 04:50:55.313 | INFO | __main__:main:101 - Starting bot +2023-06-26 04:51:04.597 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 04:51:04.599 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 04:51:04.600 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 04:51:04.635 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 04:51:17.659 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 04:51:17.660 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 04:51:33.740 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 04:51:33.741 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 04:51:54.292 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 04:51:54.294 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 04:59:32.359 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 04:59:32.361 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:02:58.782 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 05:02:58.783 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 05:02:58.785 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 05:02:58.802 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 05:02:58.803 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 05:02:58.804 | INFO
| __main__:main:101 - Starting bot +2023-06-26 05:03:01.720 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 05:03:01.723 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:03:01.724 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 05:03:01.758 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 05:03:12.283 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 05:03:12.284 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 05:03:33.028 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 05:03:33.030 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 05:03:53.142 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 05:03:53.143 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 05:04:55.331 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 05:04:55.333 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:05:19.569 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 05:05:19.571 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:05:39.773 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:05:39.774 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:05:45.839 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 05:05:45.840 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:09:41.366 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 05:10:39.977 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 05:10:39.978 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:11:12.325 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 05:11:12.327 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:13:03.556 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 05:13:03.558 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:15:59.437 | INFO | cogs.vm_tracker:setup:113 - cogs.vm_tracker Loaded +2023-06-26 05:15:59.438 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:17:52.651 | INFO | cogs.vm_tracker:setup:114 - cogs.vm_tracker Loaded +2023-06-26 05:17:52.652 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:18:37.114 | INFO | cogs.vm_tracker:setup:115 - cogs.vm_tracker Loaded +2023-06-26 05:18:37.116 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:20:39.348 | INFO | cogs.vm_tracker:setup:115 - cogs.vm_tracker Loaded +2023-06-26 05:20:39.352 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:20:50.426 | INFO | cogs.vm_tracker:setup:115 - cogs.vm_tracker Loaded +2023-06-26 05:20:50.427 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:23:10.968 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:23:10.969 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM 
advert updates +2023-06-26 05:23:58.477 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:23:58.478 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:37:15.774 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:37:15.776 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:45:40.241 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 05:45:40.243 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 05:45:40.245 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 05:45:40.266 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:45:40.267 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 05:45:40.268 | INFO | __main__:main:101 - Starting bot +2023-06-26 05:45:43.483 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 05:45:43.484 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:45:43.486 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 05:45:43.544 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 05:45:56.207 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 05:45:56.208 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 05:46:08.816 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 05:46:08.817 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 05:46:13.094 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 05:46:13.096 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 05:46:13.098 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 05:46:13.108 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:46:13.109 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 05:46:13.110 | INFO | __main__:main:101 - Starting bot +2023-06-26 05:46:16.011 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 05:46:16.012 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:46:16.014 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 05:46:16.069 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 05:46:25.899 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 05:46:25.901 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 05:47:25.479 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 05:47:25.480 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 05:47:25.483 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 05:47:25.494 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:47:25.496 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 05:47:25.497 | INFO | __main__:main:101 - Starting bot +2023-06-26 05:47:28.399 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 05:47:28.401 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:47:28.403 | INFO | __main__:on_ready:30 - 
Ready - using 2.9.0 +2023-06-26 05:47:28.468 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 05:47:37.587 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 05:47:37.589 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 05:47:50.677 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 05:47:50.678 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 05:48:13.445 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 05:48:13.446 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 05:49:18.202 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 05:49:18.203 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 05:49:18.204 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 05:49:18.214 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:49:18.215 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 05:49:18.216 | INFO | __main__:main:101 - Starting bot +2023-06-26 05:49:21.177 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 05:49:21.179 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:49:21.181 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 05:49:21.201 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 05:49:29.984 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 05:49:29.985 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 05:49:42.821 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 05:49:42.822 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 05:50:02.612 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 05:50:02.613 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 05:54:09.824 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:54:09.826 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:55:13.505 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:55:13.506 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 05:55:24.624 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 05:55:24.624 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 06:02:20.183 | INFO | cogs.vm_tracker:setup:112 - cogs.vm_tracker Loaded +2023-06-26 06:02:20.183 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 06:08:43.213 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 06:08:43.215 | INFO | cogs.vm_tracker:Update_VM_Embeds:20 - ---> Starting VM advert updates +2023-06-26 06:25:43.061 | INFO | cogs.vm_tracker:setup:111 - cogs.vm_tracker Loaded +2023-06-26 06:25:43.064 | INFO | cogs.vm_tracker:update_vm_embeds:37 - ---> Starting VM advert updates +2023-06-26 06:32:22.427 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 06:32:22.430 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM 
advert updates +2023-06-26 06:33:02.804 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 06:33:02.805 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 06:34:01.387 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 06:34:01.389 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 06:34:31.785 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 06:34:31.787 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 06:35:49.585 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 06:35:49.586 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 06:37:29.226 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 06:37:29.228 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 06:37:29.229 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 06:37:29.240 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 06:37:29.241 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 06:37:29.242 | INFO | __main__:main:101 - Starting bot +2023-06-26 06:37:32.914 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 06:37:32.917 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 06:37:32.918 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 06:37:32.938 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 06:37:43.568 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 06:37:43.569 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 06:37:57.821 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 06:37:57.823 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 06:38:16.976 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 06:38:16.977 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 07:07:29.269 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 07:37:29.267 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 08:07:29.274 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 08:37:29.244 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 08:37:29.250 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 08:37:29.269 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 08:37:39.840 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 08:37:39.841 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 08:37:57.671 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 08:37:57.672 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 08:38:20.184 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 08:38:20.185 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 09:07:29.270 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates
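Once the restarts stop, the entries above settle into two steady background schedules: update_vm_embeds ticks on the half hour (07:07, 07:37, 08:07, 08:37) and getHouseData sweeps World 2, Global Conflict 8 and Europe 8 every two hours (06:37, 08:37). A minimal sketch of a loop with that shape, assuming disnake's tasks extension and loguru; the intervals, loop and world names, and log messages are read off the log, while the class name, constructor and loop body are assumptions:

from disnake.ext import commands, tasks
from loguru import logger

WORLDS = ["World 2", "Global Conflict 8", "Europe 8"]  # the three worlds swept in the log


class ShkInfoTasks(commands.Cog):  # hypothetical class name for cogs/shk_info_tasks.py
    def __init__(self, bot: commands.InteractionBot):
        self.bot = bot
        # start() schedules an immediate first iteration, which is why
        # "Starting House Data Update" lands seconds after every "Starting bot" above
        self.getHouseData.start()

    @tasks.loop(hours=2)  # matches the 06:37 -> 08:37 spacing above
    async def getHouseData(self):
        logger.info("Starting House Data Update")
        for world in WORLDS:
            logger.info(f"Updating house info for {world}")
            # fetch and persist this world's house standings here
            # (each world takes roughly 10-20 seconds per sweep in the log)
            logger.info(f"Finished {world} update")
        logger.info("Finished House Data update")


def setup(bot: commands.InteractionBot):
    bot.add_cog(ShkInfoTasks(bot))

update_vm_embeds in cogs/vm_tracker.py presumably follows the same pattern with tasks.loop(minutes=30): the immediate first iteration explains why every vm_tracker reload is followed at once by a "---> Starting VM advert updates" tick, and the fixed interval explains why the steady-state ticks land on the half hour.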
+2023-06-26 09:09:55.610 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 09:09:55.613 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 09:10:12.037 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 09:10:12.038 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 09:10:12.040 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 09:10:12.043 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 09:10:12.044 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 09:10:12.045 | INFO | __main__:main:101 - Starting bot +2023-06-26 09:10:15.015 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 09:10:15.017 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 09:10:15.018 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 09:10:15.035 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 09:10:27.756 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 09:10:27.758 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 09:10:41.067 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 09:10:41.068 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 09:10:59.360 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 09:10:59.361 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 09:54:03.970 | INFO | cogs.relationship_manager:setup:54 - cogs.relationship_manager Loaded +2023-06-26 09:54:03.972 | INFO | __main__:load:75 - Loading: cogs.relationship_manager +2023-06-26 09:59:21.483 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:00:58.397 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:06:21.678 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:10:12.063 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 10:11:09.578 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:12:02.137 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:12:06.363 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:12:06.365 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-06-26 10:12:06.374 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 10:12:06.375 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 10:12:06.377 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 10:12:06.380 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 10:12:06.381 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 10:12:06.381 | INFO | __main__:main:101 - Starting bot +2023-06-26 10:12:09.305 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 10:12:09.307 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 10:12:09.308 | INFO | __main__:on_ready:30 - Ready -
using 2.9.0 +2023-06-26 10:12:09.341 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 10:12:22.153 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 10:12:22.154 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 10:12:36.631 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 10:12:36.632 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 10:12:53.741 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 10:12:53.742 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 10:13:13.571 | INFO | cogs.relationship_manager:setup:67 - cogs.relationship_manager Loaded +2023-06-26 10:13:26.671 | INFO | cogs.relationship_manager:setup:67 - cogs.relationship_manager Loaded +2023-06-26 10:14:06.006 | INFO | cogs.relationship_manager:setup:67 - cogs.relationship_manager Loaded +2023-06-26 10:20:13.776 | INFO | cogs.relationship_manager:setup:68 - cogs.relationship_manager Loaded +2023-06-26 10:20:13.777 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-06-26 10:20:13.785 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 10:20:13.787 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 10:20:13.788 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 10:20:13.791 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 10:20:13.791 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 10:20:13.792 | INFO | __main__:main:101 - Starting bot +2023-06-26 10:20:16.936 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 10:20:16.938 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 10:20:16.939 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 10:20:16.966 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 10:20:25.510 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 10:20:25.511 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 10:20:39.618 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 10:20:39.619 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 10:20:58.096 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 10:20:58.097 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 10:20:58.947 | INFO | cogs.relationship_manager:setup:68 - cogs.relationship_manager Loaded +2023-06-26 10:21:25.212 | INFO | cogs.relationship_manager:setup:67 - cogs.relationship_manager Loaded +2023-06-26 10:21:56.489 | INFO | cogs.relationship_manager:setup:67 - cogs.relationship_manager Loaded +2023-06-26 10:24:19.953 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:25:10.416 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:25:35.693 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:25:50.842 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:25:50.843 | INFO | __main__:load_extensions:43 - Cog 
loaded: cogs.relationship_manager +2023-06-26 10:25:50.853 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 10:25:50.854 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 10:25:50.856 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 10:25:50.859 | INFO | cogs.vm_tracker:setup:116 - cogs.vm_tracker Loaded +2023-06-26 10:25:50.860 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 10:25:50.861 | INFO | __main__:main:101 - Starting bot +2023-06-26 10:25:59.553 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 10:25:59.555 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 10:25:59.556 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 10:25:59.597 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 10:26:10.520 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 10:26:10.521 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 10:26:23.519 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 10:26:23.520 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 10:26:31.610 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:26:42.370 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 10:26:42.371 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 10:27:26.030 | INFO | cogs.relationship_manager:setup:66 - cogs.relationship_manager Loaded +2023-06-26 10:31:14.344 | INFO | cogs.relationship_manager:setup:67 - cogs.relationship_manager Loaded +2023-06-26 10:31:25.456 | INFO | cogs.relationship_manager:setup:67 - cogs.relationship_manager Loaded +2023-06-26 10:47:19.868 | INFO | cogs.relationship_manager:setup:75 - cogs.relationship_manager Loaded +2023-06-26 10:50:38.909 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-06-26 10:58:59.006 | INFO | cogs.vm_tracker:setup:120 - cogs.vm_tracker Loaded +2023-06-26 10:58:59.008 | INFO | cogs.vm_tracker:update_vm_embeds:41 - ---> Starting VM advert updates +2023-06-26 11:10:16.485 | INFO | cogs.vm_tracker:setup:125 - cogs.vm_tracker Loaded +2023-06-26 11:10:16.487 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:12:54.066 | INFO | cogs.vm_tracker:setup:125 - cogs.vm_tracker Loaded +2023-06-26 11:12:54.067 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:14:27.047 | INFO | cogs.vm_tracker:setup:125 - cogs.vm_tracker Loaded +2023-06-26 11:14:27.049 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:16:59.517 | INFO | cogs.vm_tracker:setup:124 - cogs.vm_tracker Loaded +2023-06-26 11:16:59.519 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:20:14.374 | INFO | cogs.vm_tracker:setup:128 - cogs.vm_tracker Loaded +2023-06-26 11:20:14.375 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:21:08.912 | INFO | cogs.vm_tracker:setup:129 - cogs.vm_tracker Loaded +2023-06-26 11:21:08.914 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:22:27.673 | INFO | cogs.vm_tracker:setup:128 - 
cogs.vm_tracker Loaded +2023-06-26 11:22:27.674 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:23:12.042 | INFO | cogs.vm_tracker:setup:128 - cogs.vm_tracker Loaded +2023-06-26 11:23:12.044 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:28:34.050 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-06-26 11:34:39.374 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-06-26 11:34:39.375 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-06-26 11:34:39.385 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 11:34:39.386 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 11:34:39.388 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 11:34:39.405 | INFO | cogs.vm_tracker:setup:128 - cogs.vm_tracker Loaded +2023-06-26 11:34:39.407 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 11:34:39.408 | INFO | __main__:main:101 - Starting bot +2023-06-26 11:34:43.150 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 11:34:43.152 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:34:43.155 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 11:34:43.173 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 11:34:52.419 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 11:34:52.420 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 11:35:06.015 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 11:35:06.016 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 11:35:22.240 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 11:35:22.241 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 11:36:12.679 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-06-26 11:36:12.681 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-06-26 11:36:12.689 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 11:36:12.691 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 11:36:12.693 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 11:36:12.703 | INFO | cogs.vm_tracker:setup:128 - cogs.vm_tracker Loaded +2023-06-26 11:36:12.704 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 11:36:12.705 | INFO | __main__:main:101 - Starting bot +2023-06-26 11:36:15.782 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 11:36:15.784 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:36:15.786 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 11:36:15.835 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 11:36:25.742 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 11:36:25.743 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 11:36:41.445 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 
11:36:41.446 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 11:36:59.879 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 11:36:59.880 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 11:38:01.498 | INFO | cogs.vm_tracker:setup:128 - cogs.vm_tracker Loaded +2023-06-26 11:38:01.498 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:51:31.398 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-06-26 11:51:31.400 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-06-26 11:51:31.408 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 11:51:31.410 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 11:51:31.411 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 11:51:31.417 | INFO | cogs.vm_tracker:setup:128 - cogs.vm_tracker Loaded +2023-06-26 11:51:31.418 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-06-26 11:51:31.418 | INFO | __main__:main:101 - Starting bot +2023-06-26 11:51:34.482 | INFO | cogs.shk_info_tasks:getHouseData:21 - Starting House Data Update +2023-06-26 11:51:34.484 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:51:34.486 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-06-26 11:51:34.576 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for World 2 +2023-06-26 11:51:47.025 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished World 2 update +2023-06-26 11:51:47.026 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Global Conflict 8 +2023-06-26 11:52:01.078 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Global Conflict 8 update +2023-06-26 11:52:01.079 | INFO | cogs.shk_info_tasks:getHouseData:35 - Updating house info for Europe 8 +2023-06-26 11:52:19.765 | INFO | cogs.shk_info_tasks:getHouseData:52 - Finished Europe 8 update +2023-06-26 11:52:19.766 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished House Data update +2023-06-26 11:52:52.908 | INFO | cogs.vm_tracker:setup:128 - cogs.vm_tracker Loaded +2023-06-26 11:52:52.910 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:53:25.231 | INFO | cogs.vm_tracker:setup:129 - cogs.vm_tracker Loaded +2023-06-26 11:53:25.232 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:53:43.422 | INFO | cogs.vm_tracker:setup:129 - cogs.vm_tracker Loaded +2023-06-26 11:53:43.423 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:54:03.615 | INFO | cogs.vm_tracker:setup:129 - cogs.vm_tracker Loaded +2023-06-26 11:54:03.616 | INFO | cogs.vm_tracker:update_vm_embeds:46 - ---> Starting VM advert updates +2023-06-26 11:54:45.452 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-06-26 11:54:45.453 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-06-26 11:54:45.462 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-06-26 11:54:45.464 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-06-26 11:54:45.465 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-06-26 11:54:45.475 | INFO | cogs.vm_tracker:setup:129 - cogs.vm_tracker Loaded +2023-06-26 11:54:45.477 | INFO | __main__:load_extensions:43 - 
[log hunk, 2023-06-26 11:54 to 14:36: development restart churn. On each restart, load_extensions logs "Cog loaded:" for cogs.relationship_manager, cogs.setup, cogs.shk_info_tasks and cogs.vm_tracker, main logs "Starting bot", and on_ready logs "Ready - using 2.9.0". cogs.shk_info_tasks:getHouseData repeatedly runs its "Updating house info for" / "Finished ... update" cycle over World 2, Global Conflict 8 and Europe 8, each full pass taking roughly a minute. cogs.vm_tracker is hot-reloaded many times, its setup line number drifting between 125 and 171 as the cog is edited, with each reload followed by "---> Starting VM advert updates".]
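The restart blocks above all share loguru's stock line format. A minimal sketch of a sink that reproduces it, assuming a hypothetical file name and rotation policy (neither appears in this diff):

from loguru import logger

# Mirrors loguru's default format, which matches these entries, e.g.
# "2023-06-26 11:54:45.478 | INFO | __main__:main:101 - Starting bot"
logger.add(
    "stronkbot.log",  # hypothetical file name, not taken from the diff
    format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} | "
           "{name}:{function}:{line} - {message}",
    level="INFO",
    rotation="10 MB",  # assumed rotation policy
)

logger.info("Starting bot")  # emits one line in the format above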
[2023-06-26 14:36 to 17:31: the vm_tracker heartbeat message is reworded, via an intermediate "---> Updating VM advert updates", to "---> Updating VM adverts". __main__:load logs "Loading: cogs.check_player", and the new cog is then iterated on through many hot reloads of cogs.check_player, each one exercised with "check_player used by .strix. using {'player': 'StronkStrix'}".]
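The repeated "check_player used by .strix. using {'player': 'StronkStrix'}" entries suggest each slash command logs its caller and supplied options on entry. A hedged sketch of that pattern in disnake; the cog class and the player option are assumptions based only on the log messages:

import disnake
from disnake.ext import commands
from loguru import logger


class CheckPlayer(commands.Cog):
    @commands.slash_command()
    async def check_player(self, inter: disnake.ApplicationCommandInteraction, player: str):
        # inter.filled_options is disnake's mapping of the options the caller
        # actually supplied, matching the dicts printed in this log.
        logger.info(f"check_player used by {inter.author} using {inter.filled_options}")
        ...  # lookup logic not shown in this hunk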
[2023-06-26 18:07 to 2023-07-02 20:59: steady-state operation. cogs.vm_tracker:update_vm_embeds logs "---> Updating VM adverts" once per hour, and cogs.shk_info_tasks:getHouseData runs its World 2 / Global Conflict 8 / Europe 8 cycle every two hours, with occasional cog_unload/setup reload pairs in between. Real usage appears: check_player used by MarksDad#0894, Pefqa#9250, .kattungen, valkat and cican; report_vm used by maz1721, cican, MarksDad#0894, Salamony#7090, boergeiam and .kattungen; edit_vm used by .strix. to remove entries such as 'medh', 'Shaggy Daddy', 'pistike', 'ozzyes9304' and 'StronkStrix'. The hunk ends mid-entry at "2023-07-02 20:59:25.059 | INFO |".]
cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-02 21:59:25.076 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-02 22:07:39.324 | INFO | cogs.shk_info_tasks:getHouseData:22 - Starting House Data Update +2023-07-02 22:07:39.626 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for World 2 +2023-07-02 22:07:49.868 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished World 2 update +2023-07-02 22:07:49.869 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for Global Conflict 8 +2023-07-02 22:08:00.457 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished Global Conflict 8 update +2023-07-02 22:08:00.459 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for Europe 8 +2023-07-02 22:08:11.748 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished Europe 8 update +2023-07-02 22:08:11.750 | INFO | cogs.shk_info_tasks:getHouseData:54 - Finished House Data update +2023-07-02 22:59:25.073 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-02 23:59:25.077 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 00:07:39.327 | INFO | cogs.shk_info_tasks:getHouseData:22 - Starting House Data Update +2023-07-03 00:07:39.346 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for World 2 +2023-07-03 00:07:47.254 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished World 2 update +2023-07-03 00:07:47.255 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for Global Conflict 8 +2023-07-03 00:07:56.947 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished Global Conflict 8 update +2023-07-03 00:07:56.948 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for Europe 8 +2023-07-03 00:08:09.001 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished Europe 8 update +2023-07-03 00:08:09.002 | INFO | cogs.shk_info_tasks:getHouseData:54 - Finished House Data update +2023-07-03 00:45:01.375 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'darxtar'} +2023-07-03 00:45:20.911 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'darxtar'} +2023-07-03 00:45:37.717 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'darXtar'} +2023-07-03 00:59:25.083 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 01:21:19.294 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'I AM JAM3S'} +2023-07-03 01:59:25.098 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 02:07:39.323 | INFO | cogs.shk_info_tasks:getHouseData:22 - Starting House Data Update +2023-07-03 02:07:39.439 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for World 2 +2023-07-03 02:59:25.115 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 03:59:25.126 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 04:59:25.147 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 05:59:25.159 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 06:59:25.165 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 07:59:25.169 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 08:59:25.185 | INFO | 
cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 09:59:25.190 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 10:59:25.195 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 11:20:21.047 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'Boycott'} +2023-07-03 11:20:35.060 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'Boycott'} +2023-07-03 11:59:25.200 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 12:59:25.206 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 13:59:25.203 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 14:59:25.201 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 15:59:25.220 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 16:59:25.230 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 17:27:38.590 | INFO | cogs.check_player:check_player:18 - check_player used by cican using {'player': 'Nemc'} +2023-07-03 17:59:25.240 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 18:09:19.776 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by MarksDad#0894 using {'in_game_name': 'Fatfrog'} +2023-07-03 18:59:25.250 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 19:59:25.255 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 20:59:25.268 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 21:59:25.287 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 22:59:25.288 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 23:07:45.938 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-07-03 23:07:45.940 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-07-03 23:07:45.987 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-07-03 23:07:45.988 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-07-03 23:07:45.990 | INFO | cogs.shk_info_tasks:setup:57 - cogs.shk_info_tasks Loaded +2023-07-03 23:07:45.992 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-07-03 23:07:46.007 | INFO | cogs.vm_tracker:setup:175 - cogs.vm_tracker Loaded +2023-07-03 23:07:46.008 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-07-03 23:07:46.009 | INFO | __main__:main:101 - Starting bot +2023-07-03 23:07:50.527 | INFO | cogs.shk_info_tasks:getHouseData:22 - Starting House Data Update +2023-07-03 23:07:50.529 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-03 23:07:50.531 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-03 23:07:50.864 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for World 2 +2023-07-03 23:07:58.327 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished World 2 update +2023-07-03 23:07:58.329 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for Global Conflict 8 +2023-07-03 23:11:14.707 | INFO | cogs.shk_info_tasks:cog_unload:15 - cogs.shk_info_tasks Unloaded +2023-07-03 23:11:14.711 | INFO | cogs.shk_info_tasks:setup:57 - cogs.shk_info_tasks Loaded +2023-07-03 23:11:14.713 | INFO | 
cogs.shk_info_tasks:getHouseData:22 - Starting House Data Update +2023-07-03 23:11:14.717 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for World 2 +2023-07-03 23:11:25.502 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished World 2 update +2023-07-03 23:11:25.504 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for Global Conflict 8 +2023-07-03 23:11:34.350 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished Global Conflict 8 update +2023-07-03 23:11:34.351 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for Europe 8 +2023-07-03 23:11:45.601 | INFO | cogs.shk_info_tasks:getHouseData:53 - Finished Europe 8 update +2023-07-03 23:11:45.602 | INFO | cogs.shk_info_tasks:getHouseData:54 - Finished House Data update +2023-07-04 00:07:46.047 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 00:54:55.425 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 01:06:22.698 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 01:07:46.042 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 01:11:14.709 | INFO | cogs.shk_info_tasks:getHouseData:22 - Starting House Data Update +2023-07-04 01:11:14.863 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for World 2 +2023-07-04 02:04:56.173 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 02:07:46.047 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 02:49:24.088 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 03:07:46.049 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 04:07:46.038 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 04:36:45.413 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 05:07:46.056 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 06:07:46.068 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 07:07:46.069 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 08:07:46.071 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 09:07:46.083 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 10:07:46.101 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 10:22:40.303 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 10:47:26.789 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 10:58:45.338 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-07-04 10:58:45.340 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-07-04 10:58:45.379 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-07-04 10:58:45.381 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-07-04 10:58:45.382 | INFO | cogs.shk_info_tasks:setup:57 - cogs.shk_info_tasks Loaded +2023-07-04 10:58:45.383 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-07-04 10:58:45.388 | INFO | cogs.vm_tracker:setup:175 - cogs.vm_tracker Loaded +2023-07-04 10:58:45.390 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-07-04 10:58:45.391 | INFO | __main__:main:101 - Starting bot +2023-07-04 10:58:48.380 | INFO | cogs.shk_info_tasks:getHouseData:22 - Starting House Data Update +2023-07-04 10:58:48.381 | INFO | 
cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-04 10:58:48.383 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-04 10:58:49.283 | INFO | cogs.shk_info_tasks:getHouseData:36 - Updating house info for World 2
+2023-07-04 11:03:34.602 | INFO | cogs.shk_info_tasks:cog_unload:15 - cogs.shk_info_tasks Unloaded
+2023-07-04 11:03:34.611 | INFO | cogs.shk_info_tasks:setup:58 - cogs.shk_info_tasks Loaded
+2023-07-04 11:03:34.612 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-04 11:03:34.615 | INFO | cogs.shk_info_tasks:getHouseData:38 - Updating house info for World 2
+2023-07-04 11:03:53.796 | INFO | cogs.shk_info_tasks:cog_unload:16 - cogs.shk_info_tasks Unloaded
+2023-07-04 11:03:53.804 | INFO | cogs.shk_info_tasks:setup:58 - cogs.shk_info_tasks Loaded
+2023-07-04 11:03:53.806 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-04 11:03:53.808 | INFO | cogs.shk_info_tasks:getHouseData:38 - Updating house info for World 2
+2023-07-04 11:03:57.536 | INFO | cogs.shk_info_tasks:getHouseData:54 - Finished World 2 update
+2023-07-04 11:03:57.537 | INFO | cogs.shk_info_tasks:getHouseData:38 - Updating house info for Global Conflict 8
+2023-07-04 11:04:09.349 | INFO | cogs.shk_info_tasks:getHouseData:54 - Finished Global Conflict 8 update
+2023-07-04 11:04:09.350 | INFO | cogs.shk_info_tasks:getHouseData:38 - Updating house info for Europe 8
+2023-07-04 11:04:18.216 | INFO | cogs.shk_info_tasks:getHouseData:54 - Finished Europe 8 update
+2023-07-04 11:04:18.217 | INFO | cogs.shk_info_tasks:getHouseData:55 - Finished House Data update
+2023-07-04 11:05:07.979 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by .strix. using {'in_game_name': 'remcoreiring', 'action': 'Remove'}
+2023-07-04 11:16:43.355 | INFO | cogs.shk_info_tasks:cog_unload:16 - cogs.shk_info_tasks Unloaded
+2023-07-04 11:16:43.368 | INFO | cogs.shk_info_tasks:setup:64 - cogs.shk_info_tasks Loaded
+2023-07-04 11:16:43.369 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-04 11:16:43.632 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-04 11:16:43.633 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    filter_query = {
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 446, in _request
+    tm = TimeoutHandle(self._loop, real_timeout.total)
+                                   └ 10
+
+AttributeError: 'int' object has no attribute 'total'
+2023-07-04 11:16:43.675 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-04 11:17:47.082 | INFO | cogs.shk_info_tasks:cog_unload:16 - cogs.shk_info_tasks Unloaded
+2023-07-04 11:17:47.090 | INFO | cogs.shk_info_tasks:setup:64 - cogs.shk_info_tasks Loaded
+2023-07-04 11:17:47.092 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-04 11:17:47.094 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-04 11:17:52.550 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-04 11:17:52.551 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-04 11:17:59.292 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-04 11:17:59.293 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-04 11:18:08.499 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-04 11:18:08.500 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-04 11:22:55.237 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by .kattungen using {'in_game_name': 'remcoreiring', 'action': 'Remove'}
+2023-07-04 11:58:45.416 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-04 12:51:27.270 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by cican using {'in_game_name': 'GreenHen1'}
+2023-07-04 12:58:45.428 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-04 13:17:47.087 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-04 13:17:47.155 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-04 13:17:51.797 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-04 13:17:51.798 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-04 13:17:58.361 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-04 13:17:58.361 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-04 13:18:07.370 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-04 13:18:07.371 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-04 13:58:45.436 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-04 14:58:45.456 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-04 15:17:47.113 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-04 15:17:47.482 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-04 15:17:59.287 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-04 15:17:59.288 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-04 15:18:05.985 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-04 15:18:05.986 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-04 15:18:14.646 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-04 15:18:14.647 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-04 15:58:45.461 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 16:58:45.463 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 17:17:47.107 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-04 17:17:47.238 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-04 17:17:52.625 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-04 17:17:52.626 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-04 17:17:59.239 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-04 17:17:59.240 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-04 17:18:08.183 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-04 17:18:08.185 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-04 17:58:45.470 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 18:46:30.218 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 18:58:45.464 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 19:17:47.129 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-04 19:17:47.214 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-04 19:17:50.944 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-04 19:17:50.944 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-04 19:17:57.555 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-04 19:17:57.556 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-04 19:18:06.828 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-04 19:18:06.828 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-04 19:58:45.476 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 20:58:45.480 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 21:17:47.145 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-04 21:17:47.370 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-04 21:17:52.060 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-04 21:17:52.061 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-04 21:17:58.564 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-04 21:17:58.565 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-04 
21:18:07.474 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-04 21:18:07.475 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-04 21:35:27.866 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-04 21:58:45.485 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 22:58:45.500 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-04 23:17:47.149 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-04 23:17:47.761 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-04 23:17:56.870 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-04 23:17:56.871 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-04 23:18:03.777 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-04 23:18:03.778 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-04 23:18:12.848 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-04 23:18:12.849 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-04 23:58:45.503 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 00:58:46.537 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 01:17:48.178 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 01:17:48.245 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 01:17:57.368 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 01:17:57.369 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 01:18:03.941 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 01:18:03.942 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 01:18:12.390 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 01:18:12.391 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 01:55:31.096 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 01:58:46.547 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 02:10:15.436 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 02:58:46.567 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 03:17:48.189 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 03:17:48.513 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 03:17:54.530 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 03:17:54.531 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 03:18:00.932 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 03:18:00.933 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 03:18:10.231 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 03:18:10.233 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 03:49:57.750 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 
03:58:46.561 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 04:58:46.582 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 05:17:48.205 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 05:17:48.299 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 05:17:57.170 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 05:17:57.171 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 05:18:04.072 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 05:18:04.073 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 05:18:12.832 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 05:18:12.834 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 05:38:46.928 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 05:58:46.589 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 06:44:47.944 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 06:58:46.593 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 07:11:40.408 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 07:17:48.211 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 07:17:48.267 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 07:17:58.064 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 07:17:58.065 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 07:18:04.811 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 07:18:04.812 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 07:18:13.043 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 07:18:13.044 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 07:58:46.611 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 08:58:46.605 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 09:17:48.228 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 09:17:48.274 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 09:17:57.582 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 09:17:57.583 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 09:18:04.511 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 09:18:04.513 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 09:18:12.899 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 09:18:12.900 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 09:58:46.608 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 10:02:08.953 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 10:28:40.254 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 10:58:46.622 | INFO | 
cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 11:17:48.236 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 11:17:48.360 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 11:17:57.440 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 11:17:57.441 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 11:18:04.299 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 11:18:04.300 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 11:18:12.685 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 11:18:12.686 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 11:58:46.629 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 12:58:46.624 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 13:17:48.254 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 13:17:48.548 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 13:18:01.923 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 13:18:01.924 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 13:18:09.961 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 13:18:09.963 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 13:18:20.791 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 13:18:20.792 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 13:29:37.991 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 13:58:46.647 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 14:58:46.643 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 15:17:48.275 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 15:17:48.344 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 15:18:00.187 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 15:18:00.189 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 15:18:11.725 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 15:18:11.726 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 15:18:24.563 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 15:18:24.564 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 15:36:33.116 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 15:58:46.651 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 16:58:46.676 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 17:07:37.381 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 17:17:48.279 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 17:17:48.365 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 
17:17:54.917 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 17:17:54.918 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 17:18:01.897 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 17:18:01.898 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 17:18:10.855 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 17:18:10.856 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 17:58:46.685 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 18:58:46.691 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 19:17:45.863 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 19:17:48.291 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 19:17:48.387 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 19:17:59.734 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 19:17:59.735 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 19:18:06.263 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 19:18:06.265 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 19:18:14.558 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 19:18:14.559 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 19:45:40.106 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-05 19:58:46.709 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 20:58:46.715 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 21:17:48.296 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 21:17:48.318 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 21:17:54.498 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 21:17:54.499 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 21:18:00.930 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 21:18:00.931 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 21:18:10.208 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 21:18:10.209 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 21:58:46.715 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 22:58:46.725 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-05 23:17:48.298 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-05 23:17:48.479 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-05 23:17:55.299 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-05 23:17:55.300 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-05 23:18:01.907 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-05 23:18:01.908 | INFO | 
cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-05 23:18:10.568 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-05 23:18:10.569 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-05 23:58:46.735 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 00:42:12.740 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by .kattungen using {'in_game_name': 'Ethan22'} +2023-07-06 00:58:46.757 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 01:17:48.302 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-06 01:17:48.386 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-06 01:18:02.197 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-06 01:18:02.198 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-06 01:18:08.653 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-06 01:18:08.654 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-06 01:18:17.000 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-06 01:18:17.001 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-06 01:58:46.761 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 02:58:46.766 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 03:17:48.308 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-06 03:17:48.462 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-06 03:17:56.049 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-06 03:17:56.050 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-06 03:18:03.724 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-06 03:18:03.725 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-06 03:18:12.692 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-06 03:18:12.693 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-06 03:58:46.787 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 04:55:55.653 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-06 04:58:46.803 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 05:17:48.314 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-06 05:17:48.419 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-06 05:18:02.477 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-06 05:18:02.478 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-06 05:18:08.981 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-06 05:18:08.982 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-06 05:18:17.537 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-06 05:18:17.538 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-06 05:58:46.812 | INFO | 
cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 06:22:46.681 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 06:58:46.817 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 07:17:48.316 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-06 07:17:48.395 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-06 07:18:08.915 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+
+asyncio.exceptions.TimeoutError
+2023-07-06 07:18:08.961 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-06 07:29:10.708 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 07:58:46.817 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 08:05:40.369 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 08:58:46.834 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 09:17:48.309 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-06 09:17:48.695 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-06 09:17:53.262 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-06 09:17:53.263 | INFO | cogs.shk_info_tasks:getHouseData:39
- Updating house info for Global Conflict 8 +2023-07-06 09:17:59.835 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-06 09:17:59.836 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-06 09:18:08.767 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-06 09:18:08.768 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-06 09:50:57.055 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-06 09:58:46.818 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 10:42:54.559 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-06 10:58:46.837 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 11:17:48.318 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-06 11:17:48.403 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-06 11:17:56.195 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-06 11:17:56.196 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-06 11:18:03.269 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-06 11:18:03.270 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-06 11:18:11.794 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-06 11:18:11.796 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-06 11:58:46.849 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 12:58:46.851 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 13:17:48.323 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-06 13:17:48.329 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-06 13:17:55.025 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-06 13:17:55.026 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-06 13:18:01.738 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-06 13:18:01.739 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-06 13:18:10.187 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-06 13:18:10.189 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-06 13:33:30.863 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by .kattungen using {'in_game_name': 'Queen Olive Oil', 'action': 'Remove'} +2023-07-06 13:34:22.550 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by .kattungen using {'in_game_name': 'Grumpy Farmer', 'action': 'Remove'} +2023-07-06 13:36:43.016 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by .kattungen using {'in_game_name': 'A7meDPunKMeDo', 'action': 'Remove'} +2023-07-06 13:36:55.900 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by .kattungen using {'in_game_name': 'Modesty', 'action': 'Remove'} +2023-07-06 13:58:46.860 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 14:58:46.880 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-06 15:17:48.325 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-06 15:17:48.330 | INFO 
| cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-06 15:17:57.553 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-06 15:17:57.554 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-06 15:18:04.831 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-06 15:18:04.832 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-06 15:18:13.025 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-06 15:18:13.026 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-06 15:39:16.568 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 15:58:46.893 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 16:04:04.204 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 16:58:46.899 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 17:17:48.346 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-06 17:17:48.353 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-06 17:17:55.189 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-06 17:17:55.190 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-06 17:18:01.718 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-06 17:18:01.719 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-06 17:18:10.507 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-06 17:18:10.508 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-06 17:22:30.326 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 17:58:46.917 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 18:08:17.243 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 18:58:46.931 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 19:17:48.346 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-06 19:17:48.351 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-06 19:18:08.901 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+
+asyncio.exceptions.TimeoutError
+2023-07-06 19:18:08.934 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-06 19:58:46.942 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 20:58:46.946 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 21:08:09.251 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-06 21:17:48.357 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-06 21:17:48.394 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-06 21:17:59.815 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-06 21:17:59.816 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-06 21:18:06.506 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-06 21:18:06.507 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-06 21:18:15.324 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-06 21:18:15.325 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-06 21:58:46.956 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 22:58:46.975 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-06 23:17:48.373 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-06 23:17:48.418 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-06 23:17:59.889 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-06 23:17:59.889 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-06 23:18:06.726 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-06 23:18:06.727 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-06 23:18:15.150 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-06 23:18:15.151 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-06 23:58:46.976 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 00:58:46.983 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
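The asyncio.TimeoutError above (at 07:18 and again at 19:18 on 2023-07-06) is the 20-second budget on the HouseActivity request expiring; the exception handler in getHouseData evidently logs it and the loop carries on, so that world's scan is simply skipped until the next two-hour pass. A bounded retry with backoff is one way to ride out a transiently slow endpoint; an illustrative sketch, where the helper name and retry policy are assumptions, not code from this repo:

import asyncio
import aiohttp
from loguru import logger

async def get_json_with_retry(session: aiohttp.ClientSession, url: str,
                              attempts: int = 3, backoff: float = 5.0) -> dict | None:
    # Retry a slow GET a few times so a single timeout does not cost a whole
    # world's update until the next scheduled scan.
    for attempt in range(1, attempts + 1):
        try:
            async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as resp:
                return await resp.json()
        except asyncio.TimeoutError:
            logger.warning(f"HouseActivity request timed out (attempt {attempt}/{attempts})")
            await asyncio.sleep(backoff * attempt)
    return None  # caller skips this world, as the live task already does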
+2023-07-07 00:59:13.362 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 01:17:48.384 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 01:17:48.816 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 01:17:58.766 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 01:17:58.767 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 01:18:05.772 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 01:18:05.774 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 01:18:13.774 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 01:18:13.775 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 01:58:47.006 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 02:58:47.018 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 03:17:48.412 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 03:17:48.805 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 03:17:56.530 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 03:17:56.532 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 03:18:03.582 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 03:18:03.583 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 03:18:11.660 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 03:18:11.661 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 03:49:43.137 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 03:56:20.448 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 03:58:47.034 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 04:58:47.043 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 05:17:48.424 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 05:17:48.748 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 05:18:08.056 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 05:18:08.057 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 05:18:14.669 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 05:18:14.671 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 05:18:23.578 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 05:18:23.579 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 05:58:47.065 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 06:28:32.296 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 06:55:45.569 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 06:58:47.077 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 07:17:48.435 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 07:17:48.508 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 07:17:56.620 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 07:17:56.622 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 07:18:03.449 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 07:18:03.450 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 07:18:11.480 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 07:18:11.481 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 07:58:47.077 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 08:58:47.078 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 09:17:48.452 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 09:17:48.516 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 09:17:56.579 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 09:17:56.580 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 09:18:03.550 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 09:18:03.551 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 09:18:11.984 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 09:18:11.985 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 09:37:33.033 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 09:58:42.969 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by urcracked#8261 using {'in_game_name': 'Hilksmash'}
+2023-07-07 09:58:47.089 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 09:59:09.145 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by urcracked#8261 using {'in_game_name': 'Hulksmash'}
+2023-07-07 10:16:11.322 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 10:58:47.084 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 11:17:48.449 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 11:17:48.454 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 11:17:55.490 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 11:17:55.491 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 11:18:02.549 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 11:18:02.551 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 11:18:10.922 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 11:18:10.924 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 11:21:56.249 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 11:58:47.091 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 12:45:58.197 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 12:52:34.911 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 12:58:47.093 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 13:17:48.445 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 13:17:48.470 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 13:17:58.580 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 13:17:58.581 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 13:18:05.761 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 13:18:05.762 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 13:18:14.574 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 13:18:14.575 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 13:27:23.564 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 13:58:47.113 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 14:58:47.121 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 15:10:17.286 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 15:17:48.476 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 15:17:48.516 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 15:17:59.112 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 15:17:59.112 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 15:18:05.933 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 15:18:05.934 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 15:18:14.053 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 15:18:14.054 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 15:58:47.126 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 16:54:49.380 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 16:58:47.136 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 17:01:26.286 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 17:09:39.996 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 17:17:48.477 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 17:17:49.355 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 17:18:00.773 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1154, in _create_direct_connection
+    hosts = await asyncio.shield(host_resolved)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 880, in _resolve_host
+    addrs = await self._resolver.resolve(host, port, family=self._family)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\resolver.py", line 33, in resolve
+    infos = await self._loop.getaddrinfo(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 860, in getaddrinfo
+    return await self.run_in_executor(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\thread.py", line 58, in run
+    result = self.fn(*self.args, **self.kwargs)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\socket.py", line 955, in getaddrinfo
+    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
+socket.gaierror: [Errno 11001] getaddrinfo failed
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 535, in _request
+    conn = await self._connector.connect(
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 542, in connect
+    proto = await self._create_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 907, in _create_connection
+    _, proto = await self._create_direct_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1166, in _create_direct_connection
+    raise ClientConnectorError(req.connection_key, exc) from exc
+aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host shk.azure-api.net:443 ssl:default [getaddrinfo failed]
+2023-07-07 17:18:00.893 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 17:58:47.136 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 18:02:20.276 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 18:21:21.636 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 18:58:47.135 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 19:17:48.490 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 19:17:48.569 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 19:17:56.280 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 19:17:56.281 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 19:18:03.059 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 19:18:03.061 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 19:18:11.094 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 19:18:11.095 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 19:58:47.143 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 20:33:26.322 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 20:47:37.562 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 20:58:47.153 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 21:17:48.506 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 21:17:48.579 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 21:17:52.148 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 21:17:52.149 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 21:17:58.932 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 21:17:58.934 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 21:18:07.525 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 21:18:07.526 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 21:58:47.159 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 22:54:54.721 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 22:58:47.158 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-07 23:17:48.523 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-07 23:17:48.675 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-07 23:17:57.970 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-07 23:17:57.971 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-07 23:18:05.047 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-07 23:18:05.048 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-07 23:18:13.603 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-07 23:18:13.605 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-07 23:24:33.419 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-07 23:58:47.169 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 00:58:47.181 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 01:17:48.530 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 01:17:48.642 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 01:17:57.092 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 01:17:57.093 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 01:18:04.160 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 01:18:04.161 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 01:18:12.816 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 01:18:12.817 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 01:37:39.882 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 01:58:47.185 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 02:58:47.194 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 03:17:48.525 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 03:17:48.659 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 03:17:54.423 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 03:17:54.424 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 03:18:01.306 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 03:18:01.307 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 03:18:10.020 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 03:18:10.021 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 03:58:47.197 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 04:58:47.205 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 05:17:48.523 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 05:17:48.529 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 05:17:59.899 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1154, in _create_direct_connection
+    hosts = await asyncio.shield(host_resolved)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 880, in _resolve_host
+    addrs = await self._resolver.resolve(host, port, family=self._family)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\resolver.py", line 33, in resolve
+    infos = await self._loop.getaddrinfo(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 860, in getaddrinfo
+    return await self.run_in_executor(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\thread.py", line 58, in run
+    result = self.fn(*self.args, **self.kwargs)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\socket.py", line 955, in getaddrinfo
+    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
+socket.gaierror: [Errno 11001] getaddrinfo failed
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 535, in _request
+    conn = await self._connector.connect(
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 542, in connect
+    proto = await self._create_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 907, in _create_connection
+    _, proto = await self._create_direct_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1166, in _create_direct_connection
+    raise ClientConnectorError(req.connection_key, exc) from exc
+aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host shk.azure-api.net:443 ssl:default [getaddrinfo failed]
+2023-07-08 05:17:59.969 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 05:58:47.212 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 06:56:30.470 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 06:58:47.217 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 07:17:48.553 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 07:17:48.559 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 07:18:00.402 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 07:18:00.403 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 07:18:12.222 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 07:18:12.223 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 07:18:21.256 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 07:18:21.257 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 07:58:47.227 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 08:38:40.005 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 08:58:47.249 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 09:02:29.380 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 09:17:48.562 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 09:17:48.584 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 09:18:01.454 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 09:18:01.455 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 09:18:08.305 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 09:18:08.305 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 09:18:17.082 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 09:18:17.083 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 09:25:35.627 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 09:58:47.246 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 10:58:47.256 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 11:17:48.562 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 11:17:48.602 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 11:17:54.793 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 11:17:54.794 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 11:18:02.190 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 11:18:02.191 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 11:18:10.813 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 11:18:10.814 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 11:58:47.270 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 12:58:47.283 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 13:17:48.568 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 13:17:48.651 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 13:17:53.582 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 13:17:53.583 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 13:18:00.415 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 13:18:00.416 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 13:18:09.354 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 13:18:09.356 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 13:28:23.160 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 13:58:47.294 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 14:18:09.845 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 14:58:47.317 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 15:17:48.583 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 15:17:48.786 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 15:18:01.169 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 15:18:01.170 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 15:18:07.716 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 15:18:07.717 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 15:18:16.415 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 15:18:16.416 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 15:31:42.018 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 15:58:47.314 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 16:58:47.308 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 17:17:48.587 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 17:17:48.671 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 17:17:55.803 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 17:17:55.804 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 17:18:02.909 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 17:18:02.910 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 17:18:10.998 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 17:18:11.000 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 17:31:35.043 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 17:58:47.324 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 18:20:10.002 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 18:58:47.324 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 19:12:30.364 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 19:17:48.593 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 19:17:48.733 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 19:18:00.100 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1154, in _create_direct_connection
+    hosts = await asyncio.shield(host_resolved)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 880, in _resolve_host
+    addrs = await self._resolver.resolve(host, port, family=self._family)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\resolver.py", line 33, in resolve
+    infos = await self._loop.getaddrinfo(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 860, in getaddrinfo
+    return await self.run_in_executor(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\thread.py", line 58, in run
+    result = self.fn(*self.args, **self.kwargs)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\socket.py", line 955, in getaddrinfo
+    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
+socket.gaierror: [Errno 11001] getaddrinfo failed
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 535, in _request
+    conn = await self._connector.connect(
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 542, in connect
+    proto = await self._create_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 907, in _create_connection
+    _, proto = await self._create_direct_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1166, in _create_direct_connection
+    raise ClientConnectorError(req.connection_key, exc) from exc
+aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host shk.azure-api.net:443 ssl:default [getaddrinfo failed]
+2023-07-08 19:18:00.165 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 19:58:47.324 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 20:58:47.332 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 21:17:48.608 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 21:17:48.763 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 21:17:53.902 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-08 21:17:53.903 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-08 21:18:01.743 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-08 21:18:01.744 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-08 21:18:09.615 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-08 21:18:09.616 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 21:58:47.340 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 22:18:46.798 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-08 22:58:47.365 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-08 23:17:48.614 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-08 23:17:48.712 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-08 23:18:08.908 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+asyncio.exceptions.TimeoutError
+2023-07-08 23:18:08.946 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-08 23:58:47.373 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 00:58:47.386 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 01:04:30.743 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 01:17:48.616 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 01:17:49.637 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 01:17:57.793 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 01:17:57.794 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 01:18:04.749 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 01:18:04.750 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 01:18:12.443 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 01:18:12.444 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 01:58:47.386 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 02:15:25.538 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 02:58:47.384 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 03:14:36.596 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 03:17:48.592 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 03:17:49.985 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 03:18:01.743 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 03:18:01.744 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 03:18:09.066 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 03:18:09.067 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 03:18:18.016 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 03:18:18.017 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 03:58:47.402 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 04:58:47.410 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 05:17:48.624 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 05:17:49.653 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 05:17:57.328 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 05:17:57.329 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 05:18:04.479 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 05:18:04.480 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 05:18:12.805 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 05:18:12.806 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 05:58:47.421 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 06:22:05.574 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 06:58:47.434 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 07:17:48.641 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 07:17:49.826 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 07:18:02.971 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 07:18:02.972 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 07:18:10.105 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 07:18:10.106 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 07:18:18.776 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 07:18:18.778 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 07:58:47.452 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 08:58:47.458 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 09:17:48.653 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 09:17:48.959 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 09:17:55.250 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 09:17:55.252 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 09:18:02.569 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 09:18:02.570 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 09:18:10.884 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 09:18:10.885 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 09:58:47.461 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 10:17:35.182 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 10:58:47.475 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 11:17:48.668 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 11:17:49.157 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 11:18:01.196 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 11:18:01.197 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 11:18:08.056 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 11:18:08.057 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 11:18:16.316 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 11:18:16.318 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 11:46:06.093 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'avr0005', 'action': 'Remove'}
+2023-07-09 11:50:05.848 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'xX MAHMMOUD Xx', 'action': 'Remove'}
+2023-07-09 11:50:37.198 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'GRUMPY FARMER', 'action': 'Remove'}
+2023-07-09 11:52:02.750 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Black Cops', 'action': 'Remove'}
+2023-07-09 11:52:32.126 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Dark Ladixer100000', 'action': 'Remove'}
+2023-07-09 11:56:31.443 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'lulica1', 'action': 'Remove'}
+2023-07-09 11:56:55.655 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'lulica1'}
+2023-07-09 11:57:08.672 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'lulica1', 'action': 'Adjust Start Time', 'value': 24}
+2023-07-09 11:57:47.664 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Jamiedodger', 'action': 'Remove'}
+2023-07-09 11:58:47.476 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 12:30:13.401 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Missin', 'action': 'Remove'}
+2023-07-09 12:31:34.988 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'ozcairns44', 'action': 'Remove'}
+2023-07-09 12:32:02.662 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'diana94', 'action': 'Remove'}
+2023-07-09 12:33:21.195 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Aurochsis', 'action': 'Remove'}
+2023-07-09 12:34:11.947 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'kynddaddy', 'action': 'Remove'}
+2023-07-09 12:37:25.810 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'ZHOMBIE', 'action': 'Remove'}
+2023-07-09 12:38:08.699 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Zubrowski', 'action': 'Remove'}
+2023-07-09 12:47:13.567 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Georglll', 'action': 'Remove'}
+2023-07-09 12:48:29.401 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 12:48:44.832 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'modesty', 'action': 'Remove'}
+2023-07-09 12:48:49.519 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'trevorbooth', 'action': 'Remove'}
+2023-07-09 12:49:17.797 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Kreuzritter Stefan', 'action': 'Remove'}
+2023-07-09 12:49:41.750 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Schwarzer Nero', 'action': 'Remove'}
+2023-07-09 12:50:55.742 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 12:58:47.477 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 13:17:48.674 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 13:17:48.679 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 13:17:58.687 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 13:17:58.688 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 13:18:05.658 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 13:18:05.659 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 13:18:13.757 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 13:18:13.758 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 13:58:47.485 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 14:58:47.480 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 15:17:48.677 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 15:17:48.681 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 15:17:59.933 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 15:17:59.934 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 15:18:07.229 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 15:18:07.230 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 15:18:16.055 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 15:18:16.056 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 15:21:12.848 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 15:58:47.494 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 16:44:05.513 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 16:58:47.496 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 17:15:27.456 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 17:17:48.680 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 17:17:48.746 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 17:17:56.452 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 17:17:56.453 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 17:18:04.090 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 17:18:04.091 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 17:18:12.241 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 17:18:12.242 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 17:37:55.481 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 17:58:47.503 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 18:58:47.504 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 19:15:32.564 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Peter Upchurch', 'action': 'Remove'}
+2023-07-09 19:16:41.344 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Lord Kagulas', 'action': 'Remove'}
+2023-07-09 19:17:48.682 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 19:17:48.685 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 19:17:57.756 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 19:17:57.757 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 19:18:05.252 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 19:18:05.252 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 19:18:07.405 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'robo61076', 'action': 'Remove'}
+2023-07-09 19:18:13.639 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 19:18:13.640 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 19:58:47.502 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 20:30:56.987 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'WhiteGenghis'}
+2023-07-09 20:31:03.340 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'WhiteGenghis'}
+2023-07-09 20:41:28.082 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 20:55:43.061 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'the shiz', 'action': 'Remove'}
+2023-07-09 20:56:42.563 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'the shiz'}
+2023-07-09 20:56:48.246 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'the shiz'}
+2023-07-09 20:57:41.622 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'The Shiz'}
+2023-07-09 20:57:56.009 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'the shiz', 'action': 'Adjust Start Time', 'value': 4}
+2023-07-09 20:58:12.738 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'The Shiz', 'action': 'Adjust Start Time', 'value': 4}
+2023-07-09 20:58:47.516 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 21:04:35.940 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 21:17:48.686 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 21:17:48.691 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 21:17:57.764 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 21:17:57.765 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 21:18:04.958 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 21:18:04.959 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 21:18:13.428 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 21:18:13.430 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 21:27:21.285 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-09 21:58:47.536 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 22:58:47.548 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-09 23:17:48.692 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-09 23:17:48.729 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-09 23:17:56.904 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-09 23:17:56.905 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-09 23:18:04.353 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-09 23:18:04.354 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-09 23:18:12.498 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-09 23:18:12.499 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-09 23:58:47.574 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-10 00:35:05.740 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-10 00:58:48.850 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-10 01:17:49.983 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-10 01:17:49.998 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-10 01:18:01.619 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-10 01:18:01.620 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-10 01:18:08.798 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-10 01:18:08.799 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-10 01:18:17.250 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-10 01:18:17.251 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-10 01:58:48.859 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-10 02:56:05.809 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-10 02:58:48.878 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 03:17:49.984 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 03:17:49.988 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 03:17:57.792 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 03:17:57.793 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 03:18:05.415 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 03:18:05.416 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 03:18:13.855 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 03:18:13.856 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 03:58:48.889 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 04:01:23.490 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 04:58:48.902 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 05:17:49.987 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 05:17:49.991 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 05:17:59.403 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 05:17:59.404 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 05:18:06.611 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 05:18:06.612 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 05:18:14.745 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 05:18:14.746 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 05:22:15.198 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 05:58:48.912 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 06:58:48.909 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 07:17:50.002 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 07:17:50.140 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 07:17:58.000 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 07:17:58.000 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 07:18:05.223 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 07:18:05.224 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 07:18:13.797 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 07:18:13.798 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 07:58:48.927 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 08:58:48.930 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 09:17:50.020 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 09:17:50.060 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 09:17:58.299 | INFO | 
cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 09:17:58.300 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 09:18:05.581 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 09:18:05.582 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 09:18:13.785 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 09:18:13.786 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 09:34:57.159 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 09:58:48.933 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 10:58:48.934 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 11:17:50.048 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 11:17:50.051 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 11:17:59.615 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 11:17:59.616 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 11:18:06.882 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 11:18:06.883 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 11:18:15.471 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 11:18:15.472 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 11:58:48.933 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 12:26:49.164 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'ljtr', 'action': 'Remove'} +2023-07-10 12:43:14.452 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 12:58:48.931 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 13:12:25.170 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 13:17:50.060 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 13:17:50.093 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 13:17:54.650 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 13:17:54.651 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 13:18:02.032 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 13:18:02.033 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 13:18:10.641 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 13:18:10.643 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 13:58:48.925 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 14:58:48.930 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 15:17:50.053 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 15:17:50.103 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 15:17:58.357 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 15:17:58.358 | INFO | cogs.shk_info_tasks:getHouseData:39 - 
Updating house info for Global Conflict 8 +2023-07-10 15:18:05.902 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 15:18:05.903 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 15:18:13.998 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 15:18:13.999 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 15:58:48.949 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 16:53:32.837 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 16:58:48.950 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 17:17:50.045 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 17:17:50.050 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 17:17:54.989 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 17:17:54.990 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 17:18:02.651 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 17:18:02.652 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 17:18:10.839 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 17:18:10.840 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 17:58:48.958 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 18:58:48.963 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 19:17:50.081 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 19:17:50.203 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 19:17:55.538 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 19:17:55.539 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 19:18:02.678 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 19:18:02.679 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 19:18:10.879 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 19:18:10.880 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 19:58:48.973 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 20:25:40.084 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 20:50:46.398 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'aratovchanin', 'action': 'Remove'} +2023-07-10 20:58:48.978 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 21:00:19.099 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'HOTOP'} +2023-07-10 21:00:33.097 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'HOTOP', 'action': 'Adjust Start Time', 'value': 3} +2023-07-10 21:00:57.388 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Saratovchanin', 'action': 'Remove'} +2023-07-10 21:01:46.839 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 
'ha11oga11o', 'action': 'Remove'} +2023-07-10 21:02:17.361 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'BossJeff', 'action': 'Remove'} +2023-07-10 21:02:47.458 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'cgmiller98', 'action': 'Remove'} +2023-07-10 21:03:26.764 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'GigamasterI', 'action': 'Remove'} +2023-07-10 21:14:54.444 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 21:17:50.099 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 21:17:50.222 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 21:17:56.200 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 21:17:56.202 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 21:18:03.644 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 21:18:03.645 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 21:18:11.998 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 21:18:11.999 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 21:58:48.983 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 22:20:38.977 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 22:41:01.268 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 22:58:48.990 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-10 23:17:50.100 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-10 23:17:50.123 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-10 23:18:03.752 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-10 23:18:03.752 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-10 23:18:10.716 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-10 23:18:10.717 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-10 23:18:19.560 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-10 23:18:19.561 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-10 23:47:06.680 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-10 23:58:49.001 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 00:58:49.005 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 01:17:50.110 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 01:17:50.198 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 01:17:55.716 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 01:17:55.717 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 01:18:03.370 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 01:18:03.371 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 01:18:11.643 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 
01:18:11.645 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 01:58:49.010 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 02:58:49.003 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 03:17:50.104 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 03:17:50.160 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 03:17:59.397 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 03:17:59.398 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 03:18:06.651 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 03:18:06.652 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 03:18:14.628 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 03:18:14.630 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 03:58:49.031 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 04:56:08.233 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by manton56 using {'in_game_name': 'Schwarzer Nero'} +2023-07-11 04:58:49.040 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 05:17:50.114 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 05:17:50.233 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 05:17:58.241 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 05:17:58.242 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 05:18:05.368 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 05:18:05.368 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 05:18:13.631 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 05:18:13.632 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 05:58:49.056 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 06:55:59.402 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 06:58:49.080 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 07:17:50.118 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 07:17:50.197 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 07:18:01.639 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 07:18:01.640 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 07:18:09.057 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 07:18:09.058 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 07:18:19.297 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 07:18:19.298 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 07:58:49.088 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 08:58:49.089 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 
09:17:50.117 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 09:17:50.122 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 09:18:01.006 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 09:18:01.008 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 09:18:08.570 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 09:18:08.571 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 09:18:17.151 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 09:18:17.152 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 09:58:49.099 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 10:58:49.100 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 11:17:50.127 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 11:17:50.189 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 11:17:57.674 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 11:17:57.674 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 11:18:05.211 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 11:18:05.212 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 11:18:13.412 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 11:18:13.413 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 11:58:49.102 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 12:33:31.558 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 12:58:49.112 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 13:06:32.746 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 13:17:50.134 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 13:17:50.248 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 13:17:57.209 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 13:17:57.210 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 13:18:04.592 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 13:18:04.593 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 13:18:12.952 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 13:18:12.953 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 13:20:42.122 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 13:56:53.195 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 13:58:49.119 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 14:19:59.811 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 14:58:49.120 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 15:17:50.136 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 15:17:50.229 | INFO | 
cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 15:17:59.385 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 15:17:59.386 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 15:18:06.571 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 15:18:06.573 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 15:18:14.750 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 15:18:14.752 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 15:58:49.128 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 16:58:49.141 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 17:09:24.952 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 17:17:50.143 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 17:17:50.190 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 17:18:00.856 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 17:18:00.857 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 17:18:08.018 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 17:18:08.019 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 17:18:16.775 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 17:18:16.776 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 17:42:07.271 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 17:58:49.142 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 18:58:49.158 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 19:17:50.145 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 19:17:50.222 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 19:17:58.624 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 19:17:58.625 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 19:18:06.034 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 19:18:06.035 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 19:18:14.129 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 19:18:14.130 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 19:58:49.153 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 20:20:52.943 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 20:58:49.172 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 21:17:50.148 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 21:17:50.168 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 21:18:01.638 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception +Traceback (most recent call last): + + File 
"C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1154, in _create_direct_connection + hosts = await asyncio.shield(host_resolved) + │ │ └ + └ + + File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 880, in _resolve_host + addrs = await self._resolver.resolve(host, port, family=self._family) + │ │ │ │ │ │ └ 0 + │ │ │ │ │ └ + │ │ │ │ └ 443 + │ │ │ └ 'shk.azure-api.net' + │ │ └ + │ └ + └ + + File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\resolver.py", line 33, in resolve + infos = await self._loop.getaddrinfo( + │ │ └ + │ └ + └ + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 860, in getaddrinfo + return await self.run_in_executor( + │ └ + └ + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\thread.py", line 58, in run + result = self.fn(*self.args, **self.kwargs) + │ │ └ None + │ └ None + └ None + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\socket.py", line 955, in getaddrinfo + for res in _socket.getaddrinfo(host, port, family, type, proto, flags): + │ │ │ │ │ │ │ └ + │ │ │ │ │ │ └ 0 + │ │ │ │ │ └ + │ │ │ │ └ 0 + │ │ │ └ 443 + │ │ └ 'shk.azure-api.net' + │ └ + └ + +socket.gaierror: [Errno 11001] getaddrinfo failed + + +The above exception was the direct cause of the following exception: + + +Traceback (most recent call last): + + File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in + sys.exit(asyncio.run(main())) + │ │ │ │ └ + │ │ │ └ + │ │ └ + │ └ + └ + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run + return loop.run_until_complete(main) + │ │ └ + │ └ + └ + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete + self.run_forever() + │ └ + └ + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever + super().run_forever() + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever + self._run_once() + │ └ + └ + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once + handle._run() + │ └ + └ ()> + + File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run + self._context.run(self._callback, *self._args) + │ │ │ │ │ └ + │ │ │ │ └ ()> + │ │ │ └ + │ │ └ ()> + │ └ + └ ()> + + File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop + await self.coro(*args, **kwargs) + │ │ │ └ {} + │ │ └ (,) + │ └ + └ + +> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData + async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity: + │ └ + └ + + File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__ + self._resp = await self._coro + │ │ │ └ + │ │ └ + │ └ + └ + + File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 535, in _request + conn = await self._connector.connect( + │ └ None + └ + + File 
"C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 542, in connect + proto = await self._create_connection(req, traces, timeout) + │ │ │ │ └ ClientTimeout(total=20, connect=None, sock_read=None, sock_connect=None) + │ │ │ └ [] + │ │ └ + │ └ + └ + + File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 907, in _create_connection + _, proto = await self._create_direct_connection(req, traces, timeout) + │ │ │ │ └ ClientTimeout(total=20, connect=None, sock_read=None, sock_connect=None) + │ │ │ └ [] + │ │ └ + │ └ + └ + + File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1166, in _create_direct_connection + raise ClientConnectorError(req.connection_key, exc) from exc + │ │ └ + │ └ + └ + +aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host shk.azure-api.net:443 ssl:default [getaddrinfo failed] +2023-07-11 21:18:01.711 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 21:58:49.176 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 22:11:08.903 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 22:54:14.229 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'tidriss', 'action': 'Remove'} +2023-07-11 22:54:42.795 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'jnevan', 'action': 'Remove'} +2023-07-11 22:58:49.190 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-11 23:17:50.158 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-11 23:17:50.162 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-11 23:17:56.410 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-11 23:17:56.411 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-11 23:18:08.560 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-11 23:18:08.561 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-11 23:18:17.495 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-11 23:18:17.496 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-11 23:20:38.377 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-11 23:58:49.200 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 00:05:42.573 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 00:12:35.602 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 00:21:32.476 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 00:58:49.207 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 01:17:50.176 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 01:17:50.893 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 01:18:00.365 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 01:18:00.366 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 01:18:07.752 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 01:18:07.753 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating 
house info for Europe 8 +2023-07-12 01:18:16.749 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 01:18:16.750 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 01:58:49.211 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 02:58:49.212 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 03:17:50.181 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 03:17:50.364 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 03:17:59.534 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 03:17:59.535 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 03:18:06.861 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 03:18:06.862 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 03:18:17.929 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 03:18:17.930 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 03:58:49.216 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 04:58:49.221 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 05:17:50.202 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 05:17:50.220 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 05:18:03.019 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 05:18:03.020 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 05:18:10.389 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 05:18:10.390 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 05:18:19.499 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 05:18:19.500 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 05:58:49.234 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 06:58:49.241 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 07:17:50.218 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 07:17:50.224 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 07:17:58.039 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 07:17:58.040 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 07:18:06.050 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 07:18:06.051 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 07:18:14.161 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 07:18:14.162 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 07:58:49.241 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 08:58:49.260 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 09:17:50.222 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update 
+2023-07-12 09:17:50.228 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 09:17:58.810 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 09:17:58.811 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 09:18:06.551 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 09:18:06.552 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 09:18:15.265 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 09:18:15.266 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 09:58:49.277 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 10:41:05.118 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 10:53:53.150 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 10:58:49.291 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 11:05:03.230 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 11:17:50.228 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 11:17:50.234 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 11:17:59.854 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 11:17:59.855 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 11:18:07.454 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 11:18:07.455 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 11:18:15.578 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 11:18:15.579 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 11:58:49.314 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 12:58:49.305 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 13:17:50.233 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 13:17:50.253 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 13:17:54.821 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 13:17:54.822 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 13:18:02.440 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 13:18:02.441 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 13:18:10.596 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 13:18:10.597 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 13:58:49.310 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 14:35:22.406 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 14:58:49.320 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 15:06:02.697 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 15:17:50.235 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 15:17:50.353 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 
15:18:04.242 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 15:18:04.243 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 15:18:11.447 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 15:18:11.448 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 15:18:20.032 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 15:18:20.033 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 15:58:49.314 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 16:14:20.093 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 16:28:07.399 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 16:58:49.348 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 17:17:50.239 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 17:17:51.340 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 17:18:04.036 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 17:18:04.036 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 17:18:11.888 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 17:18:11.889 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 17:18:23.048 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 17:18:23.049 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 17:20:27.451 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 17:53:45.653 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'THE LURKER', 'action': 'Remove'} +2023-07-12 17:54:50.960 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'liljon44'} +2023-07-12 17:54:56.708 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'liljon44'} +2023-07-12 17:55:54.742 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'liljon44', 'action': 'Adjust Start Time', 'value': 18} +2023-07-12 17:58:49.365 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 18:58:49.373 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 19:17:50.246 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 19:17:50.314 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 19:17:59.891 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 19:17:59.892 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 19:18:07.982 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 19:18:07.984 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 19:18:16.401 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 19:18:16.402 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 19:58:49.377 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts 
+2023-07-12 20:44:52.141 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 20:58:49.395 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 21:17:50.257 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 21:17:50.360 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 21:17:58.345 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 21:17:58.346 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 21:18:05.707 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 21:18:05.708 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 21:18:14.197 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 21:18:14.198 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 21:43:25.213 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 21:58:49.404 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 22:54:16.180 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 22:58:49.416 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-12 23:16:00.900 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-12 23:17:50.269 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-12 23:17:50.849 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-12 23:17:59.074 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-12 23:17:59.075 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-12 23:18:06.571 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-12 23:18:06.573 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-12 23:18:14.538 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-12 23:18:14.539 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-12 23:58:49.423 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 00:58:49.429 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 01:15:24.446 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-13 01:17:50.281 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-13 01:17:50.543 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-13 01:17:57.462 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-13 01:17:57.463 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-13 01:18:04.680 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-13 01:18:04.681 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-13 01:18:13.016 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-13 01:18:13.017 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-13 01:58:49.434 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 02:58:49.454 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 03:17:50.288 | 
INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-13 03:17:51.206 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-13 03:18:01.222 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-13 03:18:01.223 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-13 03:18:08.163 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-13 03:18:08.164 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-13 03:18:17.086 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-13 03:18:17.088 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-13 03:58:49.460 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 04:58:49.473 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 05:03:25.307 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-13 05:17:50.295 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-13 05:17:50.761 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-13 05:18:11.189 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+asyncio.exceptions.TimeoutError
+2023-07-13 05:18:11.222 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-13 05:58:49.482 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 06:58:49.501 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 07:17:50.290 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-13 07:17:50.988 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-13 07:18:05.614 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-13 07:18:05.615 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-13 07:18:13.256 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-13 07:18:13.257 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-13 07:18:22.165 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-13 07:18:22.166 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-13 07:47:36.532 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-13 07:58:49.508 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 08:58:49.513 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 09:17:50.315 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-13 09:17:50.408 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-13 09:18:05.156 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-13 09:18:05.157 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-13 09:18:12.616 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-13 09:18:12.617 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-13 09:18:21.239 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-13 09:18:21.240 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-13 09:58:49.507 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 10:00:44.772 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-13 10:58:49.525 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 11:17:50.324 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-13 11:17:50.327 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-13 11:17:56.434 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-13 11:17:56.435 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-13 11:18:03.969 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-13 11:18:03.970 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-13 11:18:12.516 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-13 11:18:12.517 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-13 11:58:49.521 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-13 12:32:19.527 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-13 12:58:49.522 | INFO |
cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 13:17:50.325 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-13 13:17:50.329 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-13 13:18:00.049 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-13 13:18:00.050 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-13 13:18:07.621 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-13 13:18:07.622 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-13 13:18:15.774 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-13 13:18:15.775 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-13 13:58:49.533 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 14:43:49.246 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-13 14:58:49.535 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 15:17:50.325 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-13 15:17:50.329 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-13 15:17:58.011 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-13 15:17:58.012 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-13 15:18:05.779 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-13 15:18:05.780 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-13 15:18:14.491 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-13 15:18:14.492 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-13 15:54:41.996 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-13 15:58:49.548 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 16:58:49.559 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 17:17:50.335 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-13 17:17:50.339 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-13 17:17:59.655 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-13 17:17:59.656 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-13 17:18:07.035 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-13 17:18:07.036 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-13 17:18:15.470 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-13 17:18:15.471 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-13 17:24:38.452 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-13 17:58:49.564 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 18:36:32.057 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-13 18:58:49.575 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 19:17:50.342 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-13 19:17:50.368 | INFO | 
cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-13 19:17:59.201 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-13 19:17:59.202 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-13 19:18:07.010 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-13 19:18:07.011 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-13 19:18:15.186 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-13 19:18:15.188 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-13 19:56:48.806 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-13 19:58:49.567 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 20:58:49.576 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 21:17:50.343 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-13 21:17:50.428 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-13 21:17:58.256 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-13 21:17:58.257 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-13 21:18:06.458 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-13 21:18:06.459 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-13 21:18:14.719 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-13 21:18:14.720 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-13 21:58:49.590 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 22:58:49.605 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-13 23:15:49.009 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-13 23:17:50.339 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-13 23:17:50.996 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-13 23:18:01.479 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-13 23:18:01.480 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-13 23:18:09.363 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-13 23:18:09.364 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-13 23:18:18.310 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-13 23:18:18.311 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-13 23:58:49.617 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 00:58:50.656 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 01:17:51.394 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-14 01:17:51.669 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-14 01:17:59.441 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-14 01:17:59.442 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-14 01:18:06.966 | INFO | cogs.shk_info_tasks:getHouseData:58 - 
Finished Global Conflict 8 update +2023-07-14 01:18:06.967 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-14 01:18:14.948 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-14 01:18:14.950 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-14 01:58:50.664 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 02:58:50.681 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 03:17:51.402 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-14 03:17:51.425 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-14 03:18:00.141 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-14 03:18:00.142 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-14 03:18:07.860 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-14 03:18:07.861 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-14 03:18:16.076 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-14 03:18:16.078 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-14 03:51:46.637 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-14 03:58:50.684 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 04:58:50.696 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 05:07:54.118 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-14 05:17:51.409 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-14 05:17:51.440 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-14 05:17:59.527 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-14 05:17:59.528 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-14 05:18:07.190 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-14 05:18:07.191 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-14 05:18:15.284 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-14 05:18:15.285 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-14 05:58:50.699 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 06:58:50.706 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 07:17:51.429 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-14 07:17:51.533 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-14 07:18:00.978 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-14 07:18:00.979 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-14 07:18:08.647 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-14 07:18:08.648 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-14 07:18:17.204 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-14 07:18:17.205 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-14 
07:58:50.714 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 08:58:50.727 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 08:59:49.315 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-14 09:17:51.435 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-14 09:17:51.495 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-14 09:17:58.557 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-14 09:17:58.559 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-14 09:18:06.437 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-14 09:18:06.439 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-14 09:18:14.640 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-14 09:18:14.641 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-14 09:58:50.731 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 10:58:50.733 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 11:17:51.448 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-14 11:17:51.453 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-14 11:18:03.712 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-14 11:18:03.713 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-14 11:18:10.970 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-14 11:18:10.971 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-14 11:18:19.626 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-14 11:18:19.628 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-14 11:58:50.745 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 12:14:48.282 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-14 12:58:50.751 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 13:13:18.805 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-14 13:17:51.458 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-14 13:17:51.543 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-14 13:17:58.122 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-14 13:17:58.123 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-14 13:18:05.843 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-14 13:18:05.844 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-14 13:18:13.927 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-14 13:18:13.929 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-14 13:58:50.765 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-14 14:00:00.301 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-14 14:20:20.892 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-14 14:58:50.766 | INFO | 
cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 15:17:51.476 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-14 15:17:51.481 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-14 15:18:12.224 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+asyncio.exceptions.TimeoutError
+2023-07-14 15:18:12.266 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-14 15:34:10.898 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 15:58:50.770 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 16:00:42.639 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 16:58:50.781 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 17:02:21.801 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 17:06:53.386 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 17:17:51.495 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-14 17:17:51.500 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-14 17:17:59.842 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-14 17:17:59.843 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-14 17:18:07.279 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-14 17:18:07.280 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-14 17:18:15.493 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-14 17:18:15.494 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-14 17:34:05.781 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 17:58:50.778 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 18:58:50.783 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 19:17:51.506 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-14 19:17:51.814 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-14 19:18:12.222 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+asyncio.exceptions.TimeoutError
+2023-07-14 19:18:12.253 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-14 19:58:50.799 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 19:59:45.281 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 20:09:06.464 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 20:28:04.547 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 20:57:41.475 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 20:58:50.805 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 21:17:51.516 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-14 21:17:52.043 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-14 21:18:05.384 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-14 21:18:05.385 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-14 21:18:12.688 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-14 21:18:12.689 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-14 21:18:21.554 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-14 21:18:21.555 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-14 21:56:35.403 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 21:58:50.815 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 22:11:24.496 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-14 22:39:50.708 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'papoune', 'action': 'Remove'}
+2023-07-14 22:58:50.816 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-14 23:17:51.520 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-14 23:17:51.559 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-14 23:18:00.088 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-14 23:18:00.089 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-14 23:18:07.939 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-14 23:18:07.940 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-14 23:18:16.064 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-14 23:18:16.066 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-14 23:58:50.802 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 00:27:40.229 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 00:58:50.835 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 01:17:51.531 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 01:17:51.611 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 01:17:57.742 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 01:17:57.743 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 01:18:05.970 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 01:18:05.971 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 01:18:14.228 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 01:18:14.230 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 01:58:50.842 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
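The two "Scan failed due to exception" entries above are loguru's extended tracebacks; the per-frame variable annotations they normally carry did not survive here. A minimal sketch of the kind of logging setup that produces such output, assuming a file sink named sheriff.log (the repo's actual logging config is not part of this diff):

    from loguru import logger

    logger.add(
        "sheriff.log",     # hypothetical sink path
        level="INFO",
        backtrace=True,    # keep frames beyond the except block (the asyncio/disnake internals above)
        diagnose=True,     # annotate each frame with the values of its local variables
    )

    try:
        raise TimeoutError("simulated failure")
    except Exception:
        # entries like "Scan failed due to exception" come from a handler of this shape
        logger.exception("Scan failed due to exception")

Note that diagnose=True is what prints the per-frame values; with it disabled the traceback stays in the plain form reconstructed above.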
+2023-07-15 02:27:14.523 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by .strix. using {'in_game_name': 'iran_warrior'}
+2023-07-15 02:28:43.883 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 02:58:50.856 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 03:17:51.541 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 03:17:51.824 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 03:18:01.376 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 03:18:01.376 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 03:18:08.662 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 03:18:08.663 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 03:18:16.979 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 03:18:16.980 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 03:58:50.868 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 04:58:50.859 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 05:17:51.556 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 05:17:51.680 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 05:18:01.239 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 05:18:01.240 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 05:18:08.934 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 05:18:08.935 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 05:18:17.683 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 05:18:17.685 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 05:38:28.670 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 05:58:50.874 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 06:58:50.868 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 07:04:30.655 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 07:17:51.559 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 07:17:52.184 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 07:18:12.229 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+asyncio.exceptions.TimeoutError
+2023-07-15 07:18:12.259 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 07:58:50.883 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 08:58:50.887 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 09:17:51.564 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 09:17:51.569 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 09:18:02.428 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 09:18:02.429 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 09:18:09.673 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 09:18:09.674 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 09:18:19.241 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 09:18:19.242 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 09:41:19.967 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 09:58:50.890 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 10:49:49.815 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 10:58:50.907 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 11:17:51.570 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 11:17:51.696 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 11:18:03.092 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 11:18:03.093 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 11:18:10.382 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 11:18:10.383 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 11:18:19.171 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 11:18:19.172 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 11:30:24.304 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 11:58:50.918 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 12:03:08.158 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 12:58:50.927 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 13:06:25.083 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 13:13:49.012 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'bathy89', 'action': 'Remove'}
+2023-07-15 13:17:51.585 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 13:17:51.588 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 13:17:57.856 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 13:17:57.857 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 13:18:05.327 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 13:18:05.328 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 13:18:05.899 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Naimatek', 'action': 'Remove'}
+2023-07-15 13:18:05.900 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Naimatek', 'action': 'Remove'}
+2023-07-15 13:18:05.902 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Naimatek', 'action': 'Remove'}
+2023-07-15 13:18:05.905 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Naimatek', 'action': 'Remove'}
+2023-07-15 13:18:12.595 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Naimatek', 'action': 'Remove'}
+2023-07-15 13:18:13.947 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 13:18:13.948 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 13:20:17.602 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'patrick', 'action': 'Remove'}
+2023-07-15 13:26:46.325 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'G The Insane', 'action': 'Remove'}
+2023-07-15 13:27:17.357 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'jaemiller', 'action': 'Remove'}
+2023-07-15 13:28:42.156 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'Peps'}
+2023-07-15 13:30:09.681 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'Peps'}
+2023-07-15 13:32:28.371 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'mateusz 1981'}
+2023-07-15 13:32:39.585 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'mateusz 1981'}
+2023-07-15 13:58:50.932 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 14:58:50.932 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
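Every TimeoutError above is the hard 20-second budget on session.get in getHouseData expiring, after which the task-level handler logs the failure and the rest of that scan cycle is abandoned. A hedged sketch of one way to ride out a slow response instead, retrying with backoff before giving up; get_house_activity, url and retries are illustrative names, not code from this repo:

    import asyncio
    import aiohttp
    from loguru import logger

    async def get_house_activity(session: aiohttp.ClientSession, url: str, retries: int = 3):
        """Fetch one HouseActivity page, retrying transient failures."""
        for attempt in range(1, retries + 1):
            try:
                async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as resp:
                    resp.raise_for_status()
                    return await resp.json()
            except (asyncio.TimeoutError, aiohttp.ClientError):
                if attempt == retries:
                    raise  # let the task-level handler log "Scan failed due to exception"
                logger.warning(f"HouseActivity attempt {attempt} failed, retrying")
                await asyncio.sleep(2 ** attempt)  # 2 s, 4 s, ... between attempts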
+2023-07-15 15:17:51.573 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 15:17:51.782 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 15:18:01.053 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 15:18:01.054 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 15:18:08.320 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 15:18:08.321 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 15:18:16.700 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 15:18:16.702 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 15:58:50.948 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 16:02:51.656 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 16:44:07.177 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 16:58:50.963 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 17:17:51.586 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 17:17:52.042 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 17:18:12.207 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 466, in _request
+    with timer:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\helpers.py", line 721, in __exit__
+    raise asyncio.TimeoutError from None
+asyncio.exceptions.TimeoutError
+2023-07-15 17:18:12.240 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 17:56:52.123 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 17:58:50.968 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 18:32:41.647 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 18:54:23.839 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 18:58:50.977 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 19:01:38.008 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 19:17:51.597 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 19:17:51.918 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 19:17:59.047 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 19:17:59.048 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 19:18:07.038 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 19:18:07.039 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 19:18:15.289 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 19:18:15.290 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 19:58:50.982 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 20:58:50.982 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 21:12:01.590 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 21:17:51.601 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 21:17:52.143 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 21:18:05.716 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 21:18:05.717 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 21:18:12.508 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 21:18:12.509 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 21:18:21.529 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 21:18:21.531 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 21:58:50.984 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 22:58:50.996 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-15 23:17:51.617 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-15 23:17:51.905 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-15 23:17:59.186 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-15 23:17:59.187 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-15 23:18:06.431 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-15 23:18:06.432 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-15 23:18:14.530 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-15 23:18:14.531 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-15 23:57:56.486 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-15 23:58:51.003 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 00:02:29.367 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 00:24:07.476 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 00:58:51.004 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 01:17:51.627 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 01:17:52.678 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 01:18:01.708 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 01:18:01.709 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 01:18:08.831 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 01:18:08.831 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 01:18:17.668 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 01:18:17.669 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 01:38:20.907 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 01:51:11.462 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 01:58:50.995 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 02:13:37.862 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 02:47:03.567 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 02:58:51.019 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 03:17:51.651 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 03:17:52.991 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 03:18:00.621 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 03:18:00.622 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 03:18:07.882 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 03:18:07.883 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 03:18:16.227 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 03:18:16.228 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 03:58:51.029 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 04:18:00.205 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 04:20:29.420 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 04:48:42.597 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 04:58:51.044 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 05:17:51.660 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 05:17:51.704 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 05:18:01.486 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 05:18:01.487 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 05:18:08.926 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 05:18:08.927 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 05:18:17.496 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 05:18:17.497 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 05:58:51.053 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 06:26:02.864 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 06:58:51.064 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 06:59:05.390 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 07:17:51.680 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 07:17:51.684 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 07:18:04.641 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 07:18:04.642 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 07:18:11.694 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 07:18:11.695 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 07:18:20.299 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 07:18:20.300 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 07:58:51.060 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 08:07:15.982 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 08:11:46.460 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 08:50:05.299 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 08:58:51.080 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 09:17:51.688 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 09:17:52.337 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 09:18:03.572 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 09:18:03.574 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 09:18:10.414 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 09:18:10.415 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 09:18:18.940 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 09:18:18.942 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 09:58:51.108 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 10:07:11.274 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 10:49:50.915 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 10:58:51.109 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 11:10:51.649 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 11:17:51.694 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 11:17:52.546 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 11:18:01.793 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
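The cadence in these entries (house scans every two hours at :17, VM advert refreshes hourly at :58, an on_ready line at each gateway reconnect) is the signature of disnake's tasks extension. A rough sketch of scheduling consistent with the timestamps, where the loop interval is inferred from the log rather than taken from code in this diff:

    from disnake.ext import commands, tasks
    from loguru import logger

    class ShkInfoTasks(commands.Cog):
        def __init__(self, bot: commands.Bot):
            self.bot = bot
            self.getHouseData.start()  # kick off the periodic scan

        @tasks.loop(hours=2)  # assumed interval; matches the odd-hour :17 entries above
        async def getHouseData(self):
            logger.info("Starting House Data Update")
            for world in ("World 2", "Global Conflict 8", "Europe 8"):
                logger.info(f"Updating house info for {world}")
                # ... fetch and store per-house activity for this world ...
                logger.info(f"Finished {world} update")
            logger.info("Finished House Data update")

        @getHouseData.before_loop
        async def before_scan(self):
            await self.bot.wait_until_ready()  # never scan before the gateway is up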
+2023-07-16 11:18:01.794 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 11:18:08.988 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 11:18:08.988 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 11:18:17.530 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 11:18:17.531 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 11:25:34.524 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'ro andrei'}
+2023-07-16 11:27:01.623 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'ro andrei', 'action': 'Adjust Start Time', 'value': 288}
+2023-07-16 11:30:01.524 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'ljtr'}
+2023-07-16 11:30:08.667 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'ljtr'}
+2023-07-16 11:58:51.120 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 12:58:51.132 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 13:17:51.705 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 13:17:52.756 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 13:18:04.234 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1154, in _create_direct_connection
+    hosts = await asyncio.shield(host_resolved)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 880, in _resolve_host
+    addrs = await self._resolver.resolve(host, port, family=self._family)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\resolver.py", line 33, in resolve
+    infos = await self._loop.getaddrinfo(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 860, in getaddrinfo
+    return await self.run_in_executor(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\thread.py", line 58, in run
+    result = self.fn(*self.args, **self.kwargs)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\socket.py", line 955, in getaddrinfo
+    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
+socket.gaierror: [Errno 11001] getaddrinfo failed
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 535, in _request
+    conn = await self._connector.connect(
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 542, in connect
+    proto = await self._create_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 907, in _create_connection
+    _, proto = await self._create_direct_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1166, in _create_direct_connection
+    raise ClientConnectorError(req.connection_key, exc) from exc
+aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host shk.azure-api.net:443 ssl:default [getaddrinfo failed]
+2023-07-16 13:18:04.301 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 13:31:53.376 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 13:58:51.146 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 14:31:26.413 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 14:58:51.164 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 15:17:51.705 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 15:17:52.657 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 15:18:04.140 | ERROR | cogs.shk_info_tasks:getHouseData:60 - Scan failed due to exception
+Traceback (most recent call last):
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1154, in _create_direct_connection
+    hosts = await asyncio.shield(host_resolved)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 880, in _resolve_host
+    addrs = await self._resolver.resolve(host, port, family=self._family)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\resolver.py", line 33, in resolve
+    infos = await self._loop.getaddrinfo(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 860, in getaddrinfo
+    return await self.run_in_executor(
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\thread.py", line 58, in run
+    result = self.fn(*self.args, **self.kwargs)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\socket.py", line 955, in getaddrinfo
+    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
+socket.gaierror: [Errno 11001] getaddrinfo failed
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "c:\Users\Strix\Dev\stormbrigade_sheriff\main.py", line 105, in <module>
+    sys.exit(asyncio.run(main()))
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\runners.py", line 44, in run
+    return loop.run_until_complete(main)
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 633, in run_until_complete
+    self.run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\windows_events.py", line 321, in run_forever
+    super().run_forever()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 600, in run_forever
+    self._run_once()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\base_events.py", line 1896, in _run_once
+    handle._run()
+  File "C:\Users\Strix\AppData\Local\Programs\Python\Python310\lib\asyncio\events.py", line 80, in _run
+    self._context.run(self._callback, *self._args)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\disnake\ext\tasks\__init__.py", line 162, in _loop
+    await self.coro(*args, **kwargs)
+> File "c:\Users\Strix\Dev\stormbrigade_sheriff\cogs\shk_info_tasks.py", line 42, in getHouseData
+    async with session.get(f'https://shk.azure-api.net/shkinfo/v1/HouseActivity?world={world}&house={house}&Key={key}&subscription-key=ff2e578e119348ea8b48a2acd2f5a48d',timeout=20) as houseActivity:
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 1138, in __aenter__
+    self._resp = await self._coro
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\client.py", line 535, in _request
+    conn = await self._connector.connect(
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 542, in connect
+    proto = await self._create_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 907, in _create_connection
+    _, proto = await self._create_direct_connection(req, traces, timeout)
+  File "C:\Users\Strix\Dev\stormbrigade_sheriff\sbsheriff\lib\site-packages\aiohttp\connector.py", line 1166, in _create_direct_connection
+    raise ClientConnectorError(req.connection_key, exc) from exc
+aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host shk.azure-api.net:443 ssl:default [getaddrinfo failed]
+2023-07-16 15:18:04.206 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 15:42:18.205 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 15:58:51.164 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 16:01:35.692 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'boycott'}
+2023-07-16 16:32:33.287 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 16:46:22.226 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 16:58:51.171 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 17:17:51.719 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 17:17:51.777 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 17:17:59.615 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 17:17:59.616 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 17:18:07.744 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 17:18:07.745 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 17:18:16.589 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 17:18:16.590 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 17:58:51.169 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 18:58:51.175 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 19:17:51.714 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 19:17:55.307 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 19:18:07.835 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 19:18:07.836 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 19:18:15.989 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 19:18:15.990 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 19:18:24.958 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 19:18:24.959 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 19:58:51.169 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 20:31:30.305 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 20:58:51.193 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 21:17:51.717 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 21:17:52.552 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 21:18:06.525 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 21:18:06.526 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 21:18:13.157 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 21:18:13.159 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 21:18:21.921 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 21:18:21.922 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 21:58:51.186 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 22:30:54.482 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 22:38:11.997 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 22:48:57.583 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 22:58:51.213 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-16 23:17:51.743 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-16 23:17:52.074 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-16 23:18:00.280 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-16 23:18:00.281 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-16 23:18:07.979 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-16 23:18:07.980 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-16 23:18:16.482 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-16 23:18:16.483 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-16 23:38:11.506 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 23:42:00.438 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-16 23:58:51.206 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-17 00:58:51.218 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-17 01:01:07.633 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-17 01:17:51.759 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-17 01:17:52.676 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-17 01:18:02.261 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-17 01:18:02.261 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-17 01:18:09.439 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-17 01:18:09.440 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-17 01:18:18.390 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-17 01:18:18.391 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-17 01:41:45.248 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by urcracked#8261 using {'in_game_name': 'fatcobra'}
+2023-07-17 01:58:51.235 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-17 02:58:51.242 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
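The two failures above differ from the earlier timeouts: a ClientConnectorError wrapping socket.gaierror [Errno 11001] means DNS resolution of shk.azure-api.net failed outright, so no request was ever sent. Telling the two cases apart keeps the log readable; a sketch with illustrative names (fetch_json is not a helper from this repo):

    import asyncio
    import aiohttp
    from loguru import logger

    async def fetch_json(session: aiohttp.ClientSession, url: str):
        try:
            async with session.get(url, timeout=20) as resp:
                return await resp.json()
        except aiohttp.ClientConnectorError as exc:
            # DNS/connection-level failure, e.g. [Errno 11001] getaddrinfo failed
            logger.error(f"Cannot reach {exc.host}:{exc.port}, skipping this cycle")
        except asyncio.TimeoutError:
            logger.error("Request timed out after 20 s")
        return None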
+2023-07-17 03:17:51.766 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 03:17:51.806 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 03:18:06.487 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 03:18:06.488 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 03:18:13.630 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 03:18:13.631 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 03:18:22.710 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 03:18:22.710 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 03:58:51.247 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 04:58:51.254 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 05:17:51.766 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 05:17:52.031 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 05:18:06.309 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 05:18:06.310 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 05:18:13.481 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 05:18:13.482 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 05:18:22.591 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 05:18:22.592 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 05:58:51.269 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 06:12:49.131 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 06:58:51.289 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 07:17:51.779 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 07:17:52.188 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 07:17:58.905 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 07:17:58.906 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 07:18:06.616 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 07:18:06.617 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 07:18:15.196 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 07:18:15.197 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 07:58:51.295 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 08:58:51.305 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 09:17:51.790 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 09:17:51.825 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 09:17:59.986 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 09:17:59.987 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 
09:18:07.869 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 09:18:07.870 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 09:18:16.333 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 09:18:16.334 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 09:58:51.315 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 10:58:51.320 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 11:17:51.801 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 11:17:51.935 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 11:18:05.397 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 11:18:05.398 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 11:18:12.571 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 11:18:12.572 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 11:18:21.512 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 11:18:21.513 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 11:58:51.325 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 12:09:07.280 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 12:37:45.203 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 12:58:51.322 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 13:17:51.815 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 13:17:51.821 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 13:17:59.539 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 13:17:59.540 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 13:18:07.285 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 13:18:07.286 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 13:18:16.008 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 13:18:16.009 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 13:25:41.320 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 13:58:51.326 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 14:10:49.848 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 14:33:58.573 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 14:54:20.398 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 14:58:51.336 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 15:13:57.585 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 15:17:51.810 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 15:17:51.848 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 15:17:57.567 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 15:17:57.568 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for 
Global Conflict 8 +2023-07-17 15:18:05.626 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 15:18:05.627 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 15:18:13.934 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 15:18:13.935 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 15:58:51.346 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 16:58:51.367 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 17:17:51.826 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 17:17:51.851 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 17:17:55.843 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 17:17:55.846 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 17:18:03.632 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 17:18:03.633 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 17:18:12.297 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 17:18:12.298 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 17:53:30.032 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 17:58:51.381 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 18:05:37.820 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 18:37:16.410 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 18:54:25.095 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 18:58:51.394 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 19:17:51.834 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 19:17:51.963 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 19:18:00.544 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 19:18:00.546 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 19:18:08.121 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 19:18:08.122 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 19:18:16.572 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 19:18:16.573 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 19:47:26.554 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 19:58:51.408 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 20:28:46.059 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 20:45:41.524 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 20:58:51.406 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 21:08:08.291 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 21:17:51.826 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 21:17:51.861 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 21:17:56.023 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 
update +2023-07-17 21:17:56.024 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 21:18:04.466 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 21:18:04.467 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 21:18:12.796 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 21:18:12.797 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 21:42:54.924 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 21:58:51.430 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 22:08:19.587 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 22:58:51.422 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-17 23:17:50.616 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 23:17:51.843 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-17 23:17:52.117 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-17 23:17:56.437 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-17 23:17:56.438 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-17 23:18:04.610 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-17 23:18:04.611 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-17 23:18:12.829 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-17 23:18:12.830 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-17 23:20:18.407 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-17 23:58:51.444 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 00:46:27.631 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'darXtar', 'action': 'Remove'} +2023-07-18 00:48:54.454 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'I AM JAM3S', 'action': 'Remove'} +2023-07-18 00:58:52.506 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 01:17:52.918 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 01:17:52.923 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 01:18:04.849 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 01:18:04.850 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 01:18:12.041 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 01:18:12.042 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 01:18:20.985 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 01:18:20.986 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 01:42:33.674 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'Sadon Brom'} +2023-07-18 01:45:00.310 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 01:58:52.514 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 02:02:13.694 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 
+2023-07-18 02:58:52.524 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 03:17:52.917 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 03:17:52.922 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 03:17:58.453 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 03:17:58.454 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 03:18:06.163 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 03:18:06.164 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 03:18:19.146 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 03:18:19.148 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 03:58:52.537 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 04:58:52.549 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 04:59:50.760 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 05:17:52.919 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 05:17:52.944 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 05:18:00.098 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 05:18:00.099 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 05:18:07.938 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 05:18:07.939 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 05:18:18.839 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 05:18:18.840 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 05:58:52.545 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 06:58:52.558 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 07:15:11.065 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 07:17:52.942 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 07:17:53.099 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 07:18:00.287 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 07:18:00.287 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 07:18:08.234 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 07:18:08.235 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 07:18:16.664 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 07:18:16.665 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 07:58:52.563 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 08:58:52.566 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 09:17:52.937 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 09:17:52.943 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 09:17:57.922 | INFO | 
cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 09:17:57.923 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 09:18:06.042 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 09:18:06.043 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 09:18:14.174 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 09:18:14.176 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 09:58:52.580 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 10:31:56.021 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 10:58:52.584 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 11:17:52.954 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 11:17:52.958 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 11:18:01.850 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 11:18:01.851 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 11:18:09.628 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 11:18:09.629 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 11:18:18.411 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 11:18:18.413 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 11:58:52.594 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 12:07:09.005 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Boycott', 'action': 'Remove'} +2023-07-18 12:08:22.413 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Fatfrog', 'action': 'Remove'} +2023-07-18 12:09:36.973 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'ro andrei', 'action': 'Remove'} +2023-07-18 12:10:39.821 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Ethan22', 'action': 'Remove'} +2023-07-18 12:12:52.326 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Schwarzer Nero', 'action': 'Remove'} +2023-07-18 12:33:32.869 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 12:43:16.153 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 12:58:52.613 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 13:10:33.006 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 13:17:52.949 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 13:17:52.995 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 13:17:57.939 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 13:17:57.940 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 13:18:06.350 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 13:18:06.351 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 13:18:14.883 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished 
Europe 8 update +2023-07-18 13:18:14.884 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 13:24:42.700 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 13:48:31.928 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 13:58:52.616 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 14:58:52.612 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 15:17:52.956 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 15:17:52.985 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 15:18:01.970 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 15:18:01.971 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 15:18:09.581 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 15:18:09.582 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 15:18:18.420 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 15:18:18.421 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 15:44:01.234 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 15:58:52.620 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 16:40:07.099 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 16:41:15.935 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'HOTOP'} +2023-07-18 16:41:53.910 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by Salamony#7090 using {'in_game_name': 'HOTOP', 'action': 'Remove'} +2023-07-18 16:42:00.916 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'HOTOP'} +2023-07-18 16:58:52.619 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 17:17:52.970 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 17:17:52.976 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 17:17:59.946 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 17:17:59.947 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 17:18:07.656 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 17:18:07.657 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 17:18:15.858 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 17:18:15.859 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 17:58:52.626 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 18:58:52.628 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 19:17:52.987 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 19:17:53.067 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 19:18:01.806 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 19:18:01.807 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 19:18:09.767 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global 
Conflict 8 update +2023-07-18 19:18:09.768 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 19:18:18.650 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 19:18:18.651 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 19:52:15.551 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 19:58:11.729 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 19:58:52.647 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 20:58:52.650 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 21:17:53.007 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 21:17:54.804 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 21:18:09.156 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 21:18:09.157 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 21:18:17.101 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 21:18:17.102 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 21:18:26.561 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 21:18:26.563 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 21:58:52.657 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 22:58:52.658 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-18 23:17:53.024 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-18 23:17:53.080 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-18 23:18:02.766 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-18 23:18:02.767 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-18 23:18:10.710 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-18 23:18:10.711 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-18 23:18:19.415 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-18 23:18:19.416 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-18 23:42:20.227 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-18 23:58:52.678 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 00:52:30.864 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 00:58:52.696 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 01:17:53.030 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 01:17:53.063 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 01:17:59.942 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 01:17:59.944 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 01:18:08.090 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 01:18:08.091 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 01:18:18.059 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished 
Europe 8 update +2023-07-19 01:18:18.061 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 01:58:52.692 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 02:58:52.696 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 03:05:36.273 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 03:10:08.182 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 03:17:53.030 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 03:17:53.770 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 03:18:06.514 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 03:18:06.516 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 03:18:13.824 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 03:18:13.825 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 03:18:22.815 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 03:18:22.816 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 03:58:52.709 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 04:01:02.043 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 04:58:52.714 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 05:17:53.026 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 05:17:53.050 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 05:18:00.344 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 05:18:00.345 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 05:18:07.932 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 05:18:07.933 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 05:18:15.984 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 05:18:15.985 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 05:58:52.713 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 06:58:52.720 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 07:17:53.036 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 07:17:53.168 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 07:18:03.626 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 07:18:03.627 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 07:18:11.070 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 07:18:11.070 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 07:18:20.399 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 07:18:20.400 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 07:58:52.734 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 08:58:52.744 | INFO | cogs.vm_tracker:update_vm_embeds:84 
- ---> Updating VM adverts +2023-07-19 09:17:53.051 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 09:17:53.073 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 09:18:03.047 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 09:18:03.049 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 09:18:11.035 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 09:18:11.036 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 09:18:19.934 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 09:18:19.935 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 09:58:52.760 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 10:23:47.201 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 10:58:52.785 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 11:17:53.064 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 11:17:53.068 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 11:18:07.327 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 11:18:07.328 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 11:18:14.550 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 11:18:14.551 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 11:18:23.406 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 11:18:23.407 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 11:41:57.731 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 11:44:57.609 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'georglll'} +2023-07-19 11:45:26.877 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'georgIII'} +2023-07-19 11:45:48.891 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'GeorgIII'} +2023-07-19 11:46:08.435 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'GeorgIII'} +2023-07-19 11:58:52.790 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 12:56:52.468 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 12:58:52.791 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 13:17:53.075 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 13:17:53.079 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 13:18:03.809 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 13:18:03.810 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 13:18:11.114 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 13:18:11.114 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 13:18:19.951 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 
13:18:19.952 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 13:58:52.805 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 14:58:52.807 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 15:17:19.003 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 15:17:53.082 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 15:17:53.085 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 15:18:00.227 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 15:18:00.229 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 15:18:07.829 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 15:18:07.830 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 15:18:16.048 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 15:18:16.049 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 15:58:37.963 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 15:58:52.807 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 16:16:04.385 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded +2023-07-19 16:16:04.387 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager +2023-07-19 16:16:04.430 | INFO | cogs.setup:setup:65 - cogs.setup Loaded +2023-07-19 16:16:04.431 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup +2023-07-19 16:16:04.433 | INFO | cogs.shk_info_tasks:setup:64 - cogs.shk_info_tasks Loaded +2023-07-19 16:16:04.435 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks +2023-07-19 16:16:04.446 | INFO | cogs.vm_tracker:setup:175 - cogs.vm_tracker Loaded +2023-07-19 16:16:04.448 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker +2023-07-19 16:16:04.449 | INFO | __main__:main:101 - Starting bot +2023-07-19 16:16:07.446 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 16:16:07.448 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 16:16:07.450 | INFO | __main__:on_ready:30 - Ready - using 2.9.0 +2023-07-19 16:16:07.606 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 16:16:11.475 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 16:16:11.477 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 16:16:19.779 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 16:16:19.781 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 16:16:28.093 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 16:16:28.094 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 16:17:34.902 | INFO | cogs.vm_tracker:setup:175 - cogs.vm_tracker Loaded +2023-07-19 16:17:34.904 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 17:17:34.901 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 18:16:04.476 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 18:16:04.479 | INFO 
| cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 18:16:08.457 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 18:16:08.458 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 18:16:16.390 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 18:16:16.391 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 18:16:24.951 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 18:16:24.952 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 18:17:34.909 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 19:17:34.921 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 20:16:04.486 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 20:16:04.489 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 20:16:08.281 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 20:16:08.281 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 20:16:20.496 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 20:16:20.497 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 20:16:29.095 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 20:16:29.096 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 20:17:34.936 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 21:17:34.948 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 22:16:04.486 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-19 22:16:04.489 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-19 22:16:08.333 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-19 22:16:08.334 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-19 22:16:15.922 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-19 22:16:15.923 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-19 22:16:24.485 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-19 22:16:24.486 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-19 22:17:34.964 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-19 23:17:34.963 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 00:16:04.499 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 00:16:04.525 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 00:16:08.530 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 00:16:08.532 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 00:16:17.217 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 00:16:17.218 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 
00:16:25.759 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 00:16:25.760 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 00:17:34.985 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 01:17:35.001 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 02:16:04.514 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 02:16:04.580 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 02:16:08.697 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 02:16:08.698 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 02:16:16.415 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 02:16:16.417 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 02:16:24.661 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 02:16:24.662 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 02:17:35.011 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 03:17:35.008 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 04:16:04.508 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 04:16:04.780 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 04:16:08.744 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 04:16:08.745 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 04:16:16.680 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 04:16:16.681 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 04:16:24.853 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 04:16:24.855 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 04:17:35.027 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 05:17:35.034 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 06:16:04.522 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 06:16:04.543 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 06:16:08.454 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 06:16:08.455 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 06:16:16.437 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 06:16:16.438 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 06:16:24.716 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 06:16:24.717 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 06:17:35.040 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 07:17:35.047 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 08:16:04.534 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 08:16:04.539 | INFO | 
cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 08:16:08.413 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 08:16:08.414 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 08:16:16.601 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 08:16:16.602 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 08:16:24.774 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 08:16:24.774 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 08:17:35.068 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 09:17:35.071 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 10:16:04.544 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 10:16:04.562 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 10:16:08.350 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 10:16:08.351 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 10:16:15.825 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 10:16:15.826 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 10:16:24.074 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 10:16:24.075 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 10:17:35.077 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 11:17:35.087 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 12:16:04.561 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 12:16:04.564 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 12:16:08.341 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 12:16:08.342 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 12:16:15.854 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 12:16:15.855 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 12:16:24.258 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 12:16:24.260 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 12:17:35.093 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 13:17:35.109 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 14:16:04.585 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 14:16:04.590 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 14:16:08.262 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 14:16:08.263 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 14:16:15.560 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 14:16:15.561 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 
14:16:23.952 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 14:16:23.953 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 14:17:35.106 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:17:35.106 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:21:22.673 | INFO | cogs.vm_tracker:setup:175 - cogs.vm_tracker Loaded +2023-07-20 15:21:22.679 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:28:58.937 | INFO | cogs.vm_tracker:setup:176 - cogs.vm_tracker Loaded +2023-07-20 15:28:58.938 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:29:58.654 | INFO | cogs.vm_tracker:setup:179 - cogs.vm_tracker Loaded +2023-07-20 15:29:58.656 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:30:53.303 | INFO | cogs.vm_tracker:setup:180 - cogs.vm_tracker Loaded +2023-07-20 15:30:53.305 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:31:13.543 | INFO | cogs.vm_tracker:setup:180 - cogs.vm_tracker Loaded +2023-07-20 15:31:13.545 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:34:35.889 | INFO | cogs.vm_tracker:setup:182 - cogs.vm_tracker Loaded +2023-07-20 15:34:35.890 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:35:21.390 | INFO | cogs.vm_tracker:setup:182 - cogs.vm_tracker Loaded +2023-07-20 15:35:21.392 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:36:07.990 | INFO | cogs.vm_tracker:setup:178 - cogs.vm_tracker Loaded +2023-07-20 15:36:07.991 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:37:17.737 | INFO | cogs.vm_tracker:setup:178 - cogs.vm_tracker Loaded +2023-07-20 15:37:17.739 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 15:37:23.775 | INFO | cogs.vm_tracker:setup:174 - cogs.vm_tracker Loaded +2023-07-20 15:37:23.777 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 16:16:04.601 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 16:16:04.615 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 16:16:08.755 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 16:16:08.757 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-20 16:16:16.548 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-20 16:16:16.550 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-20 16:16:26.041 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-20 16:16:26.043 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-20 16:37:23.798 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 17:37:23.800 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-20 18:16:04.607 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-20 18:16:04.619 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-20 18:16:08.392 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-20 18:16:08.394 | INFO | 
cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-20 18:16:16.506 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-20 18:16:16.507 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-20 18:16:24.992 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-20 18:16:24.993 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-20 18:37:23.819 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-20 19:37:23.832 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-20 20:16:04.622 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-20 20:16:04.636 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-20 20:16:08.661 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-20 20:16:08.662 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-20 20:16:16.804 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-20 20:16:16.805 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-20 20:16:26.101 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-20 20:16:26.102 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-20 20:37:23.844 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-20 20:55:49.886 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by boergeiam using {'in_game_name': 'Queen Kim'}
+2023-07-20 21:37:23.853 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-20 22:16:04.618 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-20 22:16:04.629 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-20 22:16:08.953 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-20 22:16:08.954 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-20 22:16:17.257 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-20 22:16:17.258 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-20 22:16:25.789 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-20 22:16:25.791 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-20 22:37:23.873 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-20 23:37:23.876 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 00:16:04.641 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 00:16:04.653 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 00:16:08.655 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 00:16:08.656 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 00:16:16.576 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 00:16:16.577 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 00:16:24.879 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 00:16:24.880 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 00:37:23.872 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 01:37:23.862 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 02:16:04.640 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 02:16:04.854 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 02:16:10.161 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 02:16:10.162 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 02:16:18.662 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 02:16:18.663 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 02:16:27.138 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 02:16:27.140 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 02:37:23.881 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 03:37:23.890 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 04:16:04.649 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 04:16:04.694 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 04:16:08.945 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 04:16:08.946 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 04:16:17.324 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 04:16:17.325 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 04:16:25.935 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 04:16:25.936 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 04:37:23.899 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 05:37:23.901 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 06:16:04.652 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 06:16:04.656 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 06:16:08.630 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 06:16:08.631 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 06:16:16.006 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 06:16:16.007 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 06:16:24.736 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 06:16:24.737 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 06:37:23.899 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 07:37:23.918 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 08:16:04.646 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 08:16:04.651 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 08:16:08.574 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 08:16:08.575 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 08:16:16.254 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 08:16:16.255 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 08:16:38.973 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 08:16:38.975 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 08:37:23.916 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 09:37:23.929 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 10:16:04.638 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 10:16:05.086 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 10:16:09.351 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 10:16:09.352 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 10:16:17.161 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 10:16:17.162 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 10:16:25.501 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 10:16:25.502 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 10:37:23.944 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 11:37:23.956 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 12:16:04.663 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 12:16:04.667 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 12:16:08.607 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 12:16:08.608 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 12:16:16.099 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 12:16:16.101 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 12:16:24.858 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 12:16:24.859 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 12:37:23.975 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 13:37:23.968 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 14:16:04.668 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 14:16:04.701 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 14:16:08.856 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 14:16:08.857 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 14:16:16.749 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 14:16:16.751 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 14:16:25.614 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 14:16:25.616 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 14:37:23.977 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 15:37:23.980 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 16:16:04.669 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 16:16:04.698 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 16:16:08.558 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 16:16:08.559 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 16:16:16.587 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 16:16:16.588 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 16:16:25.055 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 16:16:25.056 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 16:37:23.986 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 17:37:23.999 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 18:16:04.688 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 18:16:04.693 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 18:16:08.797 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 18:16:08.797 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 18:16:16.566 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 18:16:16.567 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 18:16:25.166 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 18:16:25.167 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 18:37:23.992 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 19:37:24.008 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 20:16:04.694 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 20:16:04.699 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 20:16:08.850 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 20:16:08.851 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 20:16:17.270 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 20:16:17.271 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 20:16:25.627 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 20:16:25.628 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 20:37:24.023 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 21:37:24.034 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 21:45:25.714 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'lulica1', 'action': 'Remove'}
+2023-07-21 22:16:04.703 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-21 22:16:04.708 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-21 22:16:08.658 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-21 22:16:08.659 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-21 22:16:16.566 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-21 22:16:16.567 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-21 22:16:24.723 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-21 22:16:24.724 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-21 22:37:24.044 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-21 23:24:46.046 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'Jaemiller'}
+2023-07-21 23:25:05.003 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'jaemiller'}
+2023-07-21 23:37:24.046 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 00:16:04.708 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 00:16:04.714 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 00:16:09.202 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 00:16:09.203 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 00:16:17.937 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 00:16:17.939 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 00:16:26.824 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 00:16:26.826 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 00:37:24.049 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 01:37:24.054 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 02:16:04.717 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 02:16:05.062 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 02:16:09.766 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 02:16:09.767 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 02:16:17.659 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 02:16:17.660 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 02:16:26.081 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 02:16:26.082 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 02:37:24.067 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 03:37:24.065 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 04:16:04.710 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 04:16:04.715 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 04:16:08.721 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 04:16:08.722 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 04:16:16.629 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 04:16:16.630 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 04:16:27.343 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 04:16:27.345 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 04:37:24.081 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 05:37:24.087 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 06:16:04.741 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 06:16:04.744 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 06:16:08.898 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 06:16:08.899 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 06:16:17.216 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 06:16:17.217 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 06:16:25.594 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 06:16:25.595 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 06:37:24.109 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 07:37:24.113 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 08:16:04.748 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 08:16:04.751 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 08:16:08.776 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 08:16:08.777 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 08:16:16.566 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 08:16:16.567 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 08:16:25.030 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 08:16:25.031 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 08:37:24.130 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 09:37:24.145 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 10:16:04.747 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 10:16:04.753 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 10:16:08.737 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 10:16:08.738 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 10:16:17.279 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 10:16:17.280 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 10:16:25.546 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 10:16:25.546 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 10:31:04.382 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Hulksmash', 'action': 'Remove'}
+2023-07-22 10:37:24.154 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 11:37:24.151 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 12:16:04.765 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 12:16:04.771 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 12:16:08.594 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 12:16:08.595 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 12:16:16.898 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 12:16:16.898 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 12:16:36.706 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 12:16:36.707 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 12:37:24.152 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 13:37:24.152 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 14:16:04.757 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 14:16:04.770 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 14:16:09.132 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 14:16:09.134 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 14:16:17.343 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 14:16:17.344 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 14:16:25.760 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 14:16:25.762 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 14:37:24.172 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 15:37:24.188 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 16:16:04.774 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 16:16:04.868 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 16:16:09.574 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 16:16:09.576 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 16:16:18.067 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 16:16:18.068 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 16:16:26.709 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 16:16:26.711 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 16:37:24.195 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 17:37:24.199 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 18:16:04.797 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 18:16:04.806 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 18:16:09.025 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 18:16:09.027 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 18:16:17.396 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 18:16:17.397 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 18:16:25.849 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 18:16:25.850 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 18:37:24.210 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 19:31:17.962 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by Salamony#7090 using {'in_game_name': 'Kosynier'}
+2023-07-22 19:37:24.213 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 20:16:04.798 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-22 20:16:04.811 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-22 20:16:08.755 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-22 20:16:08.756 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-22 20:16:16.637 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-22 20:16:16.638 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-22 20:16:25.285 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-22 20:16:25.286 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-22 20:37:24.217 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-22 21:37:24.218 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 00:00:41.044 | INFO | cogs.relationship_manager:setup:74 - cogs.relationship_manager Loaded
+2023-07-26 00:00:41.047 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.relationship_manager
+2023-07-26 00:00:41.086 | INFO | cogs.setup:setup:65 - cogs.setup Loaded
+2023-07-26 00:00:41.087 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.setup
+2023-07-26 00:00:41.090 | INFO | cogs.shk_info_tasks:setup:64 - cogs.shk_info_tasks Loaded
+2023-07-26 00:00:41.091 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.shk_info_tasks
+2023-07-26 00:00:41.097 | INFO | cogs.vm_tracker:setup:174 - cogs.vm_tracker Loaded
+2023-07-26 00:00:41.099 | INFO | __main__:load_extensions:43 - Cog loaded: cogs.vm_tracker
+2023-07-26 00:00:41.099 | INFO | __main__:main:101 - Starting bot
+2023-07-26 00:00:46.477 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 00:00:46.479 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 00:00:46.481 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-26 00:00:48.913 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 00:00:55.174 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 00:00:55.175 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 00:01:05.856 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 00:01:05.857 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 00:01:13.804 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 00:01:13.805 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 01:00:41.136 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 02:00:41.139 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 02:00:41.144 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 02:00:41.149 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 02:00:44.901 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 02:00:44.902 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 02:00:52.774 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 02:00:52.775 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 02:01:00.667 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 02:01:00.669 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 03:00:41.148 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 04:00:41.155 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 04:00:41.157 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 04:00:41.175 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 04:00:44.859 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 04:00:44.860 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 04:00:52.834 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 04:00:52.835 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 04:01:00.793 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 04:01:00.793 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 05:00:41.162 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 06:00:41.171 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 06:00:41.172 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 06:00:41.176 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 06:00:47.582 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 06:00:47.582 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 06:00:54.957 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 06:00:54.958 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 06:01:03.201 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 06:01:03.202 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 07:00:41.187 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 08:00:41.188 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 08:00:41.189 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 08:00:41.191 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 08:00:45.124 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 08:00:45.125 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 08:00:52.794 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 08:00:52.795 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 08:01:00.654 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 08:01:00.655 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 09:00:41.197 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 10:00:41.204 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 10:00:41.206 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 10:00:41.210 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 10:00:49.912 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 10:00:49.913 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 10:00:57.331 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 10:00:57.332 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 10:01:05.532 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 10:01:05.533 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 11:00:41.216 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 12:00:41.220 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 12:00:41.221 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 12:00:41.226 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 12:00:45.045 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 12:00:45.046 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 12:00:52.821 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 12:00:52.822 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 12:01:00.947 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 12:01:00.948 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 13:00:41.224 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 14:00:41.229 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 14:00:41.231 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 14:00:41.235 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 14:00:45.864 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 14:00:45.865 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 14:00:53.953 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 14:00:53.953 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 14:01:02.057 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 14:01:02.059 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 15:00:41.242 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 16:00:41.241 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 16:00:41.243 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 16:00:41.596 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 16:00:46.352 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 16:00:46.353 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 16:00:54.315 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 16:00:54.316 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 16:01:03.408 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 16:01:03.409 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 17:00:41.254 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 18:00:41.254 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 18:00:41.255 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 18:00:41.258 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 18:00:46.060 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 18:00:46.061 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 18:00:53.963 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 18:00:53.964 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 18:01:02.652 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 18:01:02.653 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 19:00:41.267 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 20:00:41.263 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 20:00:41.264 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 20:00:41.268 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 20:00:46.275 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 20:00:46.276 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 20:00:53.954 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 20:00:53.955 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 20:01:02.054 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 20:01:02.055 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 21:00:41.286 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 22:00:41.290 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-26 22:00:41.291 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-26 22:00:41.295 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-26 22:00:47.056 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-26 22:00:47.057 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-26 22:00:54.386 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-26 22:00:54.387 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-26 22:01:02.785 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-26 22:01:02.786 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-26 23:00:41.275 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 00:00:41.301 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 00:00:41.302 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 00:00:41.305 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 00:00:46.742 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 00:00:46.743 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 00:00:54.297 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 00:00:54.299 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 00:01:02.681 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 00:01:02.683 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 01:00:41.314 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 02:00:41.310 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 02:00:41.311 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 02:00:41.316 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 02:00:47.858 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 02:00:47.860 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 02:00:55.324 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 02:00:55.325 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 02:01:04.618 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 02:01:04.620 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 03:00:41.323 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 04:00:41.323 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 04:00:41.324 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 04:00:41.386 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 04:00:45.886 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 04:00:45.887 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 04:00:53.962 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 04:00:53.964 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 04:01:01.944 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 04:01:01.945 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 05:00:41.337 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 06:00:41.346 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 06:00:41.347 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 06:00:41.350 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 06:00:47.935 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 06:00:47.937 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 06:00:55.629 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 06:00:55.630 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 06:01:04.076 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 06:01:04.078 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 07:00:41.352 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 08:00:41.357 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 08:00:41.357 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 08:00:41.360 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 08:00:46.835 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 08:00:46.836 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 08:00:54.509 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 08:00:54.511 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 08:01:02.939 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 08:01:02.940 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 09:00:41.359 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 10:00:41.354 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 10:00:41.355 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 10:00:41.358 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 10:00:45.509 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 10:00:45.509 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 10:00:53.583 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 10:00:53.584 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 10:01:02.494 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 10:01:02.495 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 11:00:41.366 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 12:00:41.365 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 12:00:41.366 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 12:00:41.369 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 12:00:47.401 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 12:00:47.403 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 12:00:54.865 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 12:00:54.865 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 12:01:03.491 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 12:01:03.492 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 13:00:41.373 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 14:00:41.386 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 14:00:41.387 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 14:00:41.391 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 14:00:46.145 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 14:00:46.146 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 14:00:54.039 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 14:00:54.040 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 14:01:02.632 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 14:01:02.633 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 15:00:41.392 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 16:00:41.394 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 16:00:41.395 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 16:00:41.398 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 16:00:46.900 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 16:00:46.901 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 16:00:54.571 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 16:00:54.572 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 16:01:06.630 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 16:01:06.631 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 17:00:41.406 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 18:00:41.400 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 18:00:41.407 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 18:00:41.416 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 18:00:47.711 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 18:00:47.712 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 18:00:55.715 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 18:00:55.716 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 18:01:04.677 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 18:01:04.679 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 19:00:41.420 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 20:00:41.409 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 20:00:41.420 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 20:00:41.422 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 20:00:45.112 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 20:00:45.113 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 20:00:53.145 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 20:00:53.146 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 20:01:01.004 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 20:01:01.005 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 21:00:41.418 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 22:00:41.411 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-27 22:00:41.413 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-27 22:00:41.609 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-27 22:00:48.007 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-27 22:00:48.008 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-27 22:00:55.781 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-27 22:00:55.782 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-27 22:01:04.647 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-27 22:01:04.648 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-27 23:00:41.419 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 00:00:41.408 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 00:00:41.439 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 00:00:41.447 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 00:00:45.459 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 00:00:45.460 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 00:00:53.449 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 00:00:53.451 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 00:01:01.824 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 00:01:01.825 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 01:00:41.430 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 02:00:41.421 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 02:00:41.437 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 02:00:41.440 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 02:00:46.187 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 02:00:46.188 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 02:00:54.192 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 02:00:54.193 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 02:01:03.043 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 02:01:03.044 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 03:00:41.453 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 04:00:41.443 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 04:00:41.445 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 04:00:41.448 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 04:00:45.659 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 04:00:45.660 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 04:00:53.712 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 04:00:53.713 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 04:01:02.009 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 04:01:02.011 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 05:00:41.450 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 06:00:41.451 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 06:00:41.453 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 06:00:41.455 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 06:00:47.766 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 06:00:47.767 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 06:00:59.953 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 06:00:59.954 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 06:01:08.670 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 06:01:08.671 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 07:00:41.446 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 08:00:41.457 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 08:00:41.458 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 08:00:41.460 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 08:00:46.305 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 08:00:46.306 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 08:00:54.018 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 08:00:54.019 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 08:01:02.601 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 08:01:02.602 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 09:00:41.448 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 10:00:41.471 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 10:00:41.472 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 10:00:41.476 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 10:00:47.826 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 10:00:47.827 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 10:00:55.362 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 10:00:55.363 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 10:01:06.077 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 10:01:06.079 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 11:00:41.465 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 12:00:41.461 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 12:00:41.468 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 12:00:41.470 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 12:00:47.961 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 12:00:47.962 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 12:00:55.826 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 12:00:55.827 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 12:01:04.466 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 12:01:04.468 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 13:00:41.467 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 14:00:41.462 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 14:00:41.470 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 14:00:41.472 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 14:00:46.924 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 14:00:46.925 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 14:01:03.796 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 14:01:03.797 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 14:01:11.677 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 14:01:11.678 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 15:00:41.486 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 16:00:41.496 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 16:00:41.513 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 16:00:41.525 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 16:00:48.691 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 16:00:48.692 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 16:00:56.383 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 16:00:56.385 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 16:01:04.692 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 16:01:04.693 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 17:00:41.517 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 18:00:41.500 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 18:00:41.502 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 18:00:41.507 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 18:00:45.237 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 18:00:45.238 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 18:00:53.017 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 18:00:53.018 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 18:01:00.937 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 18:01:00.939 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 19:00:41.525 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 19:11:08.288 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by valkat using {'in_game_name': 'Mazza1205'}
+2023-07-28 19:11:35.055 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by valkat using {'in_game_name': 'Mazza1205'}
+2023-07-28 20:00:41.506 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 20:00:41.510 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 20:00:41.511 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 20:00:45.362 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 20:00:45.363 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 20:00:53.362 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 20:00:53.363 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 20:01:01.721 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 20:01:01.722 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 21:00:41.528 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 22:00:41.515 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-28 22:00:41.530 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-28 22:00:41.538 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-28 22:00:49.617 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-28 22:00:49.619 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-28 22:00:57.474 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-28 22:00:57.475 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-28 22:01:07.564 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-28 22:01:07.567 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-28 22:20:14.389 | INFO | __main__:on_ready:30 - Ready - using 2.9.0
+2023-07-28 23:00:41.528 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 00:00:41.538 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-29 00:00:41.540 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 00:00:41.543 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-29 00:00:45.265 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-29 00:00:45.267 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-29 00:00:52.880 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-29 00:00:52.880 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-29 00:01:01.064 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-29 00:01:01.065 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-29 01:00:41.556 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 02:00:41.539 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-29 02:00:41.543 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-29 02:00:41.570 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 02:00:45.248 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-29 02:00:45.249 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-29 02:00:53.014 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-29 02:00:53.015 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-29 02:01:00.861 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-29 02:01:00.862 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-29 03:00:41.583 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 04:00:41.547 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-29 04:00:41.552 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-29 04:00:41.594 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 04:00:47.010 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-29 04:00:47.011 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-29 04:00:54.613 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-29 04:00:54.614 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-29 04:01:03.231 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-29 04:01:03.235 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-29 05:00:41.587 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 06:00:41.558 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-29 06:00:41.586 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2
+2023-07-29 06:00:41.592 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 06:00:46.335 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update
+2023-07-29 06:00:46.336 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8
+2023-07-29 06:00:54.357 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update
+2023-07-29 06:00:54.358 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8
+2023-07-29 06:01:03.023 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update
+2023-07-29 06:01:03.024 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update
+2023-07-29 06:42:42.827 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'KingJohn1743'}
+2023-07-29 06:43:36.266 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'KingJohn1743', 'action': 'Adjust Start Time', 'value': 7}
+2023-07-29 07:00:41.607 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29 07:00:47.503 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'Sirvok'}
+2023-07-29 07:03:32.566 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'Mazza1205'}
+2023-07-29 07:03:42.303 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Mazza1205', 'action': 'Adjust Start Time', 'value': 24}
+2023-07-29 07:04:39.536 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'boldstealth1'}
+2023-07-29 07:05:02.518 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'Boldstealth1'}
+2023-07-29 07:05:20.174 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Boldstealth1', 'action': 'Adjust Start Time', 'value': 21}
+2023-07-29 07:06:02.988 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Boldstealth1', 'action': 'Adjust Start Time', 'value': 3}
+2023-07-29 07:06:50.815 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'drag o'}
+2023-07-29 07:07:06.662 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'Drag o'}
+2023-07-29 07:07:50.049 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Drag o', 'action': 'Adjust Start Time', 'value': 34}
+2023-07-29 07:09:09.779 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'HitMaster'}
+2023-07-29 07:11:34.069 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'HitMaster', 'action': 'Adjust Start Time', 'value': 134}
+2023-07-29 08:00:41.583 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update
+2023-07-29 08:00:41.588 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts
+2023-07-29
08:00:41.589 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 08:00:47.969 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 08:00:47.970 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 08:00:55.462 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 08:00:55.463 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-29 08:01:04.342 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 08:01:04.343 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 09:00:41.624 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 10:00:41.584 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-29 10:00:41.589 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 10:00:41.631 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 10:00:46.258 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 10:00:46.259 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 10:00:53.984 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 10:00:53.985 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-29 10:01:05.032 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 10:01:05.033 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 11:00:41.646 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 12:00:41.596 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-29 12:00:41.599 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 12:00:41.658 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 12:00:45.367 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 12:00:45.368 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 12:00:53.194 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 12:00:53.195 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-29 12:01:00.943 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 12:01:00.945 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 13:00:41.672 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 14:00:41.587 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-29 14:00:41.593 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 14:00:41.681 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 14:00:47.859 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 14:00:47.860 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 14:00:55.691 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 14:00:55.692 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 
+2023-07-29 14:01:04.171 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 14:01:04.173 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 15:00:41.684 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 16:00:41.598 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-29 16:00:41.603 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 16:00:41.691 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 16:00:50.171 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 16:00:50.172 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 16:00:58.180 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 16:00:58.181 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-29 16:01:07.289 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 16:01:07.290 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 17:00:41.704 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 18:00:41.604 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-29 18:00:41.693 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 18:00:41.712 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 18:00:48.153 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 18:00:48.154 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 18:00:55.955 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 18:00:55.956 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-29 18:01:04.783 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 18:01:04.784 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 19:00:41.703 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 20:00:41.602 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-29 20:00:41.726 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 20:00:41.745 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 20:00:46.640 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 20:00:46.641 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 20:00:56.216 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 20:00:56.217 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-29 20:01:06.384 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 20:01:06.385 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 21:00:41.739 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 22:00:41.598 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-29 22:00:41.754 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-29 
22:00:41.792 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-29 22:00:46.674 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-29 22:00:46.676 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-29 22:00:54.635 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-29 22:00:54.636 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-29 22:01:03.869 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-29 22:01:03.870 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-29 23:00:41.753 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 00:00:41.624 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 00:00:41.656 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 00:00:41.764 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 00:00:47.148 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 00:00:47.149 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 00:00:55.401 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 00:00:55.402 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 00:01:04.494 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 00:01:04.496 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 01:00:41.787 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 02:00:41.642 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 02:00:41.798 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 02:00:42.439 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 02:00:46.337 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 02:00:46.337 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 02:00:54.064 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 02:00:54.065 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 02:01:03.396 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 02:01:03.397 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 03:00:41.810 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 04:00:41.639 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 04:00:41.825 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 04:00:42.457 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 04:00:47.318 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 04:00:47.319 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 04:00:56.599 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 04:00:56.600 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 
+2023-07-30 04:01:07.420 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 04:01:07.422 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 05:00:41.839 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 06:00:41.652 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 06:00:41.664 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 06:00:41.839 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 06:00:48.044 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 06:00:48.045 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 06:00:55.535 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 06:00:55.536 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 06:01:04.773 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 06:01:04.774 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 07:00:41.837 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 08:00:41.679 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 08:00:41.683 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 08:00:41.850 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 08:00:46.067 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 08:00:46.068 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 08:00:53.763 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 08:00:53.764 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 08:01:02.357 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 08:01:02.358 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 09:00:41.840 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 10:00:41.685 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 10:00:41.689 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 10:00:41.857 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 10:00:50.788 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 10:00:50.789 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 10:00:57.979 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 10:00:57.980 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 10:01:08.006 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 10:01:08.007 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 11:00:41.863 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 12:00:41.696 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 12:00:41.867 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 
12:00:42.538 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 12:00:48.138 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 12:00:48.139 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 12:00:56.084 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 12:00:56.085 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 12:01:04.519 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 12:01:04.520 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 13:00:41.871 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 14:00:41.710 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 14:00:41.730 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 14:00:41.866 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 14:00:47.105 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 14:00:47.106 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 14:00:54.701 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 14:00:54.702 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 14:01:04.701 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 14:01:04.702 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 15:00:41.871 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 16:00:41.721 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 16:00:41.893 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 16:00:41.958 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 16:00:51.285 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 16:00:51.286 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 16:00:59.285 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 16:00:59.286 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 16:01:08.064 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 16:01:08.066 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 17:00:41.903 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 18:00:41.732 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 18:00:41.918 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 18:00:42.526 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 18:00:46.819 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 18:00:46.820 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 18:00:54.962 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 18:00:54.963 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 
+2023-07-30 18:01:03.650 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 18:01:03.651 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 19:00:41.923 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 20:00:41.745 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 20:00:41.889 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 20:00:41.931 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 20:00:45.694 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 20:00:45.695 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 20:00:53.860 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 20:00:53.861 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 20:01:03.053 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 20:01:03.054 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-30 21:00:41.931 | INFO | cogs.vm_tracker:update_vm_embeds:84 - ---> Updating VM adverts +2023-07-30 22:00:41.748 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-30 22:00:41.770 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-30 22:00:45.494 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-30 22:00:45.496 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-30 22:00:53.313 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-30 22:00:53.314 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-30 22:01:02.175 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-30 22:01:02.176 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 00:00:41.742 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 00:00:41.747 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 00:00:46.326 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 00:00:46.327 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 00:00:53.879 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 00:00:53.880 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 00:01:02.260 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 00:01:02.261 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 02:00:41.778 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 02:00:41.832 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 02:00:47.638 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 02:00:47.639 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 02:00:55.349 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 02:00:55.353 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house 
info for Europe 8 +2023-07-31 02:01:04.276 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 02:01:04.278 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 04:00:41.775 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 04:00:41.936 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 04:00:48.063 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 04:00:48.063 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 04:00:55.917 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 04:00:55.917 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 04:01:04.984 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 04:01:04.986 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 06:00:41.779 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 06:00:41.891 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 06:00:46.565 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 06:00:46.566 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 06:00:54.144 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 06:00:54.145 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 06:01:02.752 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 06:01:02.753 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 08:00:41.781 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 08:00:41.787 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 08:00:47.255 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 08:00:47.256 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 08:00:54.919 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 08:00:54.920 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 08:01:03.555 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 08:01:03.556 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 10:00:41.792 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 10:00:41.835 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 10:00:45.695 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 10:00:45.696 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 10:00:54.056 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 10:00:54.057 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 10:01:03.848 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 10:01:03.850 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 10:44:46.204 | INFO | cogs.vm_tracker:edit_vm:41 
- edit_vm used by roguedestroyer using {'in_game_name': 'ljtr', 'action': 'Remove'} +2023-07-31 12:00:41.795 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 12:00:41.801 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 12:00:46.489 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 12:00:46.489 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 12:00:54.058 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 12:00:54.059 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 12:01:02.647 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 12:01:02.649 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 14:00:41.807 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 14:00:41.811 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 14:00:48.285 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 14:00:48.286 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 14:00:55.999 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 14:00:56.000 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 14:01:04.817 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 14:01:04.819 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 16:00:41.814 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 16:00:41.861 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 16:00:52.759 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 16:00:52.760 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 16:01:00.348 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 16:01:00.349 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 16:01:09.112 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 16:01:09.113 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 18:00:41.822 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 18:00:41.841 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 18:00:49.853 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 18:00:49.854 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 18:00:58.192 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 18:00:58.193 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 18:01:06.794 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 18:01:06.795 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 20:00:41.824 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 20:00:41.833 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info 
for World 2 +2023-07-31 20:00:48.252 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 20:00:48.253 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 20:00:55.837 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 20:00:55.838 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 20:01:04.849 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 20:01:04.851 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-07-31 22:00:41.840 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-07-31 22:00:42.030 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-07-31 22:00:47.987 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-07-31 22:00:47.988 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-07-31 22:00:56.900 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-07-31 22:00:56.902 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-07-31 22:01:06.109 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-07-31 22:01:06.110 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 00:00:41.841 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 00:00:42.486 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 00:00:48.428 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 00:00:48.428 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 00:00:56.023 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 00:00:56.024 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 00:01:04.524 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 00:01:04.525 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 02:00:41.864 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 02:00:42.116 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 02:00:47.190 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 02:00:47.191 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 02:00:55.002 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 02:00:55.003 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 02:01:03.524 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 02:01:03.525 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 04:00:41.872 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 04:00:42.357 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 04:00:47.876 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 04:00:47.878 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 04:00:56.127 | INFO | 
cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 04:00:56.128 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 04:01:05.139 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 04:01:05.140 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 06:00:41.874 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 06:00:42.009 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 06:00:46.284 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 06:00:46.285 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 06:00:53.966 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 06:00:53.967 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 06:01:02.564 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 06:01:02.565 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 08:00:41.869 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 08:00:41.925 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 08:00:45.907 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 08:00:45.908 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 08:00:53.636 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 08:00:53.637 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 08:01:02.467 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 08:01:02.469 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 08:05:05.090 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'fatcobra', 'action': 'Remove'} +2023-08-01 08:05:14.332 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'fatcobra'} +2023-08-01 08:05:21.607 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by roguedestroyer using {'in_game_name': 'fatcobra'} +2023-08-01 08:05:41.626 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'fatcobra', 'action': 'Adjust Start Time', 'value': 7} +2023-08-01 10:00:41.884 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 10:00:41.888 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 10:00:46.976 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 10:00:46.977 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 10:00:54.638 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 10:00:54.640 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 10:01:04.734 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 10:01:04.735 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 12:00:41.892 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 12:00:42.037 | INFO | 
cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 12:00:47.553 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 12:00:47.553 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 12:00:55.534 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 12:00:55.535 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 12:01:04.320 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 12:01:04.321 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 14:00:41.907 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 14:00:41.940 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 14:00:46.808 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 14:00:46.809 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 14:00:54.716 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 14:00:54.718 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 14:01:03.552 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 14:01:03.554 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 16:00:41.903 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 16:00:41.935 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 16:00:46.631 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 16:00:46.632 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 16:00:54.575 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 16:00:54.576 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 16:01:03.310 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 16:01:03.311 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 18:00:41.926 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 18:00:41.959 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 18:00:49.923 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 18:00:49.924 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 18:00:57.582 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 18:00:57.583 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 18:01:05.924 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 18:01:05.925 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 20:00:41.940 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 20:00:43.178 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 20:00:48.917 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 20:00:48.918 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global 
Conflict 8 +2023-08-01 20:00:57.986 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 20:00:57.987 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 20:01:08.283 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 20:01:08.284 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-01 22:00:41.955 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-01 22:00:42.258 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-01 22:00:46.718 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-01 22:00:46.719 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-01 22:00:54.516 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-01 22:00:54.517 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-01 22:01:03.642 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-01 22:01:03.643 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 00:00:41.962 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 00:00:42.008 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 00:00:46.679 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 00:00:46.680 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 00:00:54.298 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 00:00:54.299 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 00:01:02.966 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 00:01:02.967 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 01:23:51.205 | INFO | cogs.vm_tracker:edit_vm:41 - edit_vm used by roguedestroyer using {'in_game_name': 'Sadon Brom', 'action': 'Remove'} +2023-08-02 02:00:41.969 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 02:00:41.975 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 02:00:46.750 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 02:00:46.751 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 02:00:54.477 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 02:00:54.480 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 02:01:03.220 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 02:01:03.221 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 04:00:41.960 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 04:00:41.996 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 04:00:47.721 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 04:00:47.721 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 04:00:55.539 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update 
+2023-08-02 04:00:55.540 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 04:01:04.413 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 04:01:04.414 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 06:00:41.982 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 06:00:41.986 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 06:00:48.101 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 06:00:48.102 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 06:00:55.898 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 06:00:55.899 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 06:01:04.976 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 06:01:04.978 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 07:46:06.583 | INFO | cogs.vm_tracker:report_vm:21 - report_vm used by urcracked#8261 using {'in_game_name': 'sandpiper'} +2023-08-02 08:00:41.995 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 08:00:41.998 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 08:00:45.933 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 08:00:45.934 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 08:00:53.989 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 08:00:53.990 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 08:01:02.770 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 08:01:02.771 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 10:00:41.990 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 10:00:41.993 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 10:00:46.222 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 10:00:46.223 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 10:00:54.472 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 10:00:54.473 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 10:01:04.263 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 10:01:04.264 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 12:00:41.996 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 12:00:42.001 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 12:00:47.495 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 12:00:47.496 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 12:00:55.237 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 12:00:55.239 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 12:01:04.153 | INFO | 
cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 12:01:04.154 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 14:00:42.002 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 14:00:42.007 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 14:00:46.990 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 14:00:46.991 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 14:00:54.602 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 14:00:54.604 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 14:01:03.304 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 14:01:03.306 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 16:00:42.017 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 16:00:42.021 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 16:00:46.859 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 16:00:46.860 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 16:00:54.610 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 16:00:54.611 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 16:01:03.429 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 16:01:03.430 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 18:00:42.033 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 18:00:42.187 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 18:00:47.252 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 18:00:47.253 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 18:00:55.053 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 18:00:55.054 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 18:01:04.158 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 18:01:04.159 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 20:00:42.041 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 20:00:42.046 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 20:00:47.741 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 20:00:47.741 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 20:00:55.458 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 20:00:55.459 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 20:01:04.180 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 20:01:04.181 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-02 22:00:42.046 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-02 
22:00:42.051 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-02 22:00:46.931 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-02 22:00:46.932 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-02 22:00:54.682 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-02 22:00:54.684 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-02 22:01:03.833 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-02 22:01:03.834 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 00:00:42.051 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 00:00:42.055 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 00:00:47.857 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 00:00:47.858 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-03 00:00:55.072 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 00:00:55.073 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 00:01:03.953 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 00:01:03.954 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 02:00:43.221 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 02:00:43.225 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 02:00:47.073 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 02:00:47.073 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-03 02:00:54.861 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 02:00:54.862 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 02:01:03.478 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 02:01:03.479 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 04:00:43.243 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 04:00:43.261 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 04:00:48.194 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 04:00:48.196 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-03 04:00:55.719 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 04:00:55.720 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 04:01:04.460 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 04:01:04.462 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 06:00:43.258 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 06:00:43.479 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 06:00:49.011 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 06:00:49.012 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house 
info for Global Conflict 8 +2023-08-03 06:00:56.778 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 06:00:56.780 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 06:01:05.830 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 06:01:05.831 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 08:00:43.251 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 08:00:43.255 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 08:00:47.801 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 08:00:47.802 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-03 08:00:55.441 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 08:00:55.441 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 08:01:04.485 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 08:01:04.486 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 10:00:43.259 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 10:00:43.262 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 10:00:48.501 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 10:00:48.502 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-03 10:00:56.183 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 10:00:56.184 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 10:01:09.556 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 10:01:09.557 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 12:00:43.266 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 12:00:43.270 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 12:00:47.996 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 12:00:47.997 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-03 12:00:55.735 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 12:00:55.736 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 12:01:04.528 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 12:01:04.529 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update +2023-08-03 14:00:43.281 | INFO | cogs.shk_info_tasks:getHouseData:23 - Starting House Data Update +2023-08-03 14:00:43.284 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for World 2 +2023-08-03 14:00:47.981 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished World 2 update +2023-08-03 14:00:47.982 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Global Conflict 8 +2023-08-03 14:00:55.414 | INFO | cogs.shk_info_tasks:getHouseData:58 - Finished Global Conflict 8 update +2023-08-03 14:00:55.415 | INFO | cogs.shk_info_tasks:getHouseData:39 - Updating house info for Europe 8 +2023-08-03 14:01:04.334 | INFO | 
cogs.shk_info_tasks:getHouseData:58 - Finished Europe 8 update +2023-08-03 14:01:04.335 | INFO | cogs.shk_info_tasks:getHouseData:61 - Finished House Data update diff --git a/main.py b/main.py index 2725308..0e670ae 100644 --- a/main.py +++ b/main.py @@ -1,20 +1,105 @@ import disnake import os +import asyncio +import sys +import traceback +from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase + + from dotenv import load_dotenv from disnake.ext import commands,tasks +from loguru import logger -#Get variables -load_dotenv('.env') +load_dotenv(".env", override=True) -intents = disnake.Intents().all() -bot = commands.InteractionBot( - sync_commands_debug=False, - intents=intents -) +INTENTS = disnake.Intents.all() +SYNC_FLAGS = commands.CommandSyncFlags.default() +TOKEN = os.getenv("BOT_TOKEN") -@bot.slash_command(description="Test Command") -async def test_command(inter:disnake.ApplicationCommandInteraction, string:str = "Cake is a lie"): - await inter.response.send_message(string) +class StronkBot(commands.InteractionBot): + """Base bot instance.""" + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) -bot.run(os.getenv('BOT_TOKEN')) \ No newline at end of file + # Connection string read from the environment (assumed MONGO_URI variable) instead of a hard-coded URI with committed credentials. + self.db_client: AsyncIOMotorClient = AsyncIOMotorClient(os.getenv("MONGO_URI", ""), maxPoolSize=10) + self.database: AsyncIOMotorDatabase = self.db_client.stormbrigade + async def on_ready(self) -> None: + logger.info(f"Ready - running disnake {disnake.__version__}") + + def load_extensions(self, path: str) -> None: + + for module in os.listdir(path): + name, ext = os.path.splitext(module) + + if "__" in name or ext != ".py": + continue + + extension = f"cogs.{name}" + + super().load_extension(extension) + logger.info(f"Cog loaded: {extension}") + +async def main() -> None: + """Construct the bot, load its extensions, and start it.""" + logger.add("info.log", rotation="500 MB") + bot = StronkBot(command_sync_flags=SYNC_FLAGS,intents=INTENTS, reload=True) + + # Module-level slash-command callbacks take the interaction as their first parameter; the original stray self arguments would have been bound to it. + @bot.slash_command(guild_ids=[934462932669038732],description="Reloads all the cogs in the bot") + async def reload(inter:disnake.ApplicationCommandInteraction,cog:str = None): + if not cog: + for module in os.listdir("cogs/"): + name, ext = os.path.splitext(module) + + if "__" in name or ext != ".py": + continue + + extension = f"cogs.{name}" + + bot.reload_extension(extension) + logger.info(f"Cog reloaded: {extension}") + + await inter.send("Reloaded all cogs") + else: + extension = f"cogs.{cog}" + bot.reload_extension(extension) + logger.info(f"Cog reloaded: {extension}") + await inter.send(f"Reloaded - {extension}") + + @bot.slash_command(guild_ids=[934462932669038732],description="Load a new cog") + async def load(inter:disnake.ApplicationCommandInteraction,cog:str): + try: + bot.load_extension(f"cogs.{cog}") + logger.info(f"Loaded: cogs.{cog}") + await inter.send(f"Loaded: cogs.{cog}") + except Exception as e: + logger.exception(f"Failed: {traceback.format_exception(e)}") + await inter.send(f"Failed: {traceback.format_exception(e)}") + + @bot.slash_command(guild_ids=[934462932669038732],description="Unload a cog") + async def unload(inter:disnake.ApplicationCommandInteraction,cog:str): + try: + bot.unload_extension(f"cogs.{cog}") + logger.info(f"Unloaded: cogs.{cog}") + await inter.send(f"Unloaded: cogs.{cog}") + except Exception as e: + logger.exception(f"Failed: {traceback.format_exception(e)}") + await inter.send(f"Failed: {traceback.format_exception(e)}") + + @reload.autocomplete("cog") + async def autocomp_input(inter:disnake.ApplicationCommandInteraction,user_input:str): + # Discord shows at most 25 choices, so filter by what the user has typed so far. + return [x[:-3] for x in os.listdir("cogs/") if x.endswith(".py") and user_input.lower() in x.lower()][:25] + + try: + bot.load_extensions("cogs/") + except Exception: + await bot.close() + raise + + if not TOKEN: + raise RuntimeError("BOT_TOKEN is not set") + logger.info("Starting bot") + await bot.start(TOKEN) + +if __name__ == "__main__": + sys.exit(asyncio.run(main())) \ No newline at end of file
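The rewritten main.py now draws both of its secrets from the environment. A minimal smoke-test sketch, assuming the hypothetical MONGO_URI variable named in the constructor comment above (BOT_TOKEN already existed in the original code):

import os

from dotenv import load_dotenv

# Load the same .env file main.py uses, then report which secrets resolve.
load_dotenv(".env", override=True)

for name in ("BOT_TOKEN", "MONGO_URI"):  # MONGO_URI is an assumed variable name
    print(f"{name}: {'set' if os.getenv(name) else 'MISSING'}")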
logger.exception(f"Failed:{traceback.format_exception(e)}") + await inter.send(f"Failed:{traceback.format_exception(e)}") + + @reload.autocomplete("cog") + async def autocomp_input(self, inter:disnake.ApplicationCommandInteraction,user_input:str): + return [x[:-3] for x in os.listdir("cogs/") if x.endswith(".py")] + + try: + bot.load_extensions("cogs/") + except Exception: + await bot.close() + raise + + logger.info("Starting bot") + await bot.start(TOKEN or "") + +if __name__ == "__main__": + sys.exit(asyncio.run(main())) \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index d63b94a..60756a8 100644 Binary files a/requirements.txt and b/requirements.txt differ diff --git a/resources/StormBrigade_White.png b/resources/StormBrigade_White.png new file mode 100644 index 0000000..cd6bf6a Binary files /dev/null and b/resources/StormBrigade_White.png differ diff --git a/resources/house_sprites/1.png b/resources/house_sprites/1.png new file mode 100644 index 0000000..7ec02d3 Binary files /dev/null and b/resources/house_sprites/1.png differ diff --git a/resources/house_sprites/10.png b/resources/house_sprites/10.png new file mode 100644 index 0000000..6445df9 Binary files /dev/null and b/resources/house_sprites/10.png differ diff --git a/resources/house_sprites/11.png b/resources/house_sprites/11.png new file mode 100644 index 0000000..d73adfc Binary files /dev/null and b/resources/house_sprites/11.png differ diff --git a/resources/house_sprites/12.png b/resources/house_sprites/12.png new file mode 100644 index 0000000..f77e516 Binary files /dev/null and b/resources/house_sprites/12.png differ diff --git a/resources/house_sprites/13.png b/resources/house_sprites/13.png new file mode 100644 index 0000000..b983202 Binary files /dev/null and b/resources/house_sprites/13.png differ diff --git a/resources/house_sprites/14.png b/resources/house_sprites/14.png new file mode 100644 index 0000000..0777095 Binary files /dev/null and b/resources/house_sprites/14.png differ diff --git a/resources/house_sprites/15.png b/resources/house_sprites/15.png new file mode 100644 index 0000000..554e383 Binary files /dev/null and b/resources/house_sprites/15.png differ diff --git a/resources/house_sprites/16.png b/resources/house_sprites/16.png new file mode 100644 index 0000000..b7fffe1 Binary files /dev/null and b/resources/house_sprites/16.png differ diff --git a/resources/house_sprites/17.png b/resources/house_sprites/17.png new file mode 100644 index 0000000..a4a6b3d Binary files /dev/null and b/resources/house_sprites/17.png differ diff --git a/resources/house_sprites/18.png b/resources/house_sprites/18.png new file mode 100644 index 0000000..3f8c0ae Binary files /dev/null and b/resources/house_sprites/18.png differ diff --git a/resources/house_sprites/19.png b/resources/house_sprites/19.png new file mode 100644 index 0000000..cc0701c Binary files /dev/null and b/resources/house_sprites/19.png differ diff --git a/resources/house_sprites/2.png b/resources/house_sprites/2.png new file mode 100644 index 0000000..2debc76 Binary files /dev/null and b/resources/house_sprites/2.png differ diff --git a/resources/house_sprites/20.png b/resources/house_sprites/20.png new file mode 100644 index 0000000..55cc58f Binary files /dev/null and b/resources/house_sprites/20.png differ diff --git a/resources/house_sprites/3.png b/resources/house_sprites/3.png new file mode 100644 index 0000000..d7bc359 Binary files /dev/null and b/resources/house_sprites/3.png differ diff --git 
diff --git a/resources/house_sprites/4.png b/resources/house_sprites/4.png new file mode 100644 index 0000000..8e77952 Binary files /dev/null and b/resources/house_sprites/4.png differ diff --git a/resources/house_sprites/5.png b/resources/house_sprites/5.png new file mode 100644 index 0000000..2a968f4 Binary files /dev/null and b/resources/house_sprites/5.png differ diff --git a/resources/house_sprites/6.png b/resources/house_sprites/6.png new file mode 100644 index 0000000..f90fa5c Binary files /dev/null and b/resources/house_sprites/6.png differ diff --git a/resources/house_sprites/7.png b/resources/house_sprites/7.png new file mode 100644 index 0000000..41561b1 Binary files /dev/null and b/resources/house_sprites/7.png differ diff --git a/resources/house_sprites/8.png b/resources/house_sprites/8.png new file mode 100644 index 0000000..8f34c07 Binary files /dev/null and b/resources/house_sprites/8.png differ diff --git a/resources/house_sprites/9.png b/resources/house_sprites/9.png new file mode 100644 index 0000000..dca75a9 Binary files /dev/null and b/resources/house_sprites/9.png differ diff --git a/sb_emojis.py b/sb_emojis.py new file mode 100644 index 0000000..4bd759f --- /dev/null +++ b/sb_emojis.py @@ -0,0 +1,39 @@ +embed_fillers = { + "no_data":"<:no_data:1020809240648101978>", + "banned": "<:banned:1058830988391764118>", + "banned_before": "<:banned_before:1058830989838786632>", + "vm_active": "<:vm_active:1058829589524271146>", + "vm_logged": "<:vm_logged:1058829384577974322>" +} + +houseEmojis = { + 1:"<:H1:947634558931697685>", + 2:"<:H2:947634559116255262>", + 3:"<:H3:947634558789115956>", + 4:"<:H4:947543041747595296>", + 5:"<:H5:947634559200137287>", + 6:"<:H6:947479700215853096>", + 7:"<:H7:947634559174983770>", + 8:"<:H8:947634559204331590>", + 9:"<:H9:947634559149826058>", + 10:"<:H10:947634559074328607>", + 11:"<:H11:947634559154028554>", + 12:"<:H12:947634558977843231>", + 13:"<:H13:947634559334363186>", + 14:"<:H14:947634558906536020>", + 15:"<:H15:947479684197777409>", + 16:"<:H16:947479656452460545>", + 17:"<:H17:947634559162413086>", + 18:"<:H18:947634559233716264>", + 19:"<:H19:947634558982049803>", + 20:"<:H20:947634559028199466>" +} + +relationshipEmojis = { + 'Enemy':'<:Enemy_house_faction:947479557982781460>', + 'Allied':"<:Faction_ally:947479253157560320>", + 'Faction':"<:Faction_member:947479527624425492>", + 'Peace': "<:truce:1017037932034535495>", + 'Rogue': "<:UglyJester:947549417500729434>", + 'Neutral':'' +} \ No newline at end of file
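sb_emojis.py maps house numbers and diplomatic states to custom-emoji markup. A small sketch of how the two tables might be combined into a report line; render_house_line is an illustrative helper, not part of this diff:

from sb_emojis import houseEmojis, relationshipEmojis

def render_house_line(house: int, relation: str) -> str:
    # .get() falls back to an empty string so an unknown key cannot break a report loop.
    return f"{houseEmojis.get(house, '')} House {house} {relationshipEmojis.get(relation, '')}".strip()

print(render_house_line(4, "Allied"))  # <:H4:947543041747595296> House 4 <:Faction_ally:947479253157560320>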
diff --git a/sb_utils.py b/sb_utils.py new file mode 100644 index 0000000..6aa24b8 --- /dev/null +++ b/sb_utils.py @@ -0,0 +1,45 @@ +import asyncio +import random +from typing import TypedDict +import db +from httpx import AsyncClient +from sqlalchemy.orm import Session +import datetime + +Player_ban_data = TypedDict("Player_ban_data",{"player_name":str, "banned":bool, "permanent":bool}) + +async def check_banned_player(db_session:Session, httpx_session:AsyncClient, player_name:str) -> Player_ban_data: + # httpx.AsyncClient (not the sync Client) is required because .get() is awaited below. + return_dict = {"player_name":player_name, "permanent":False, "banned":False} + player:db.Player = db.get_or_create(db_session,db.Player,player_name = player_name)[0] + player_ban_query:db.PlayerBan = db_session.query(db.PlayerBan).filter(db.PlayerBan.player_id == player.id).filter(db.PlayerBan.unbanned == False).one_or_none() + if player_ban_query is None: + await asyncio.sleep(0.01+random.random()*0.5) # small jitter so lookups don't hammer the SHK login server + api_request = await httpx_session.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={player_name}') + api_request = api_request.json() + if not api_request: #Request returned empty, so player_name is banned + new_player_ban = db.PlayerBan(player_id = player.id) + db_session.add(new_player_ban) + db_session.commit() + return_dict["banned"] = True + return return_dict + else: #Player returned by SHK servers, so not banned + return return_dict + else: #Player found in PlayerBan, evaluate whether it's permanent, then return + return_dict["banned"] = True + elapsed_time = datetime.datetime.now() - player_ban_query.date_added + if elapsed_time.days >= 3: + player_ban_query.last_check = datetime.datetime.now() + api_request = await httpx_session.get(f'http://login.strongholdkingdoms.com/ajaxphp/username_search.php?term={player_name}') + api_request = api_request.json() + if api_request: #Player returned by SHK servers, so not banned, flag the entry as unbanned + player_ban_query.unbanned = True + elif elapsed_time.days >= 14: #.days is a property, not a method; still missing after two weeks, so treat the ban as permanent + player_ban_query.permanent = True + db_session.commit() + return_dict["banned"] = not player_ban_query.unbanned + return_dict["permanent"] = player_ban_query.permanent + return return_dict + \ No newline at end of file diff --git a/sbsheriff.sqlite b/sbsheriff.sqlite new file mode 100644 index 0000000..6a69ee2 Binary files /dev/null and b/sbsheriff.sqlite differ diff --git a/sbsheriff/Lib/site-packages/PIL/BdfFontFile.py b/sbsheriff/Lib/site-packages/PIL/BdfFontFile.py new file mode 100644 index 0000000..102b72e --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/BdfFontFile.py @@ -0,0 +1,110 @@ +# +# The Python Imaging Library +# $Id$ +# +# bitmap distribution font (bdf) file parser +# +# history: +# 1996-05-16 fl created (as bdf2pil) +# 1997-08-25 fl converted to FontFile driver +# 2001-05-25 fl removed bogus __init__ call +# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) +# 2003-04-22 fl more robustification (from Graham Dumpleton) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +""" +Parse X Bitmap Distribution Format (BDF) +""" + + +from .
import FontFile, Image + +bdf_slant = { + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other", +} + +bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"} + + +def bdf_char(f): + # skip to STARTCHAR + while True: + s = f.readline() + if not s: + return None + if s[:9] == b"STARTCHAR": + break + id = s[9:].strip().decode("ascii") + + # load symbol properties + props = {} + while True: + s = f.readline() + if not s or s[:6] == b"BITMAP": + break + i = s.find(b" ") + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") + + # load bitmap + bitmap = [] + while True: + s = f.readline() + if not s or s[:7] == b"ENDCHAR": + break + bitmap.append(s[:-1]) + bitmap = b"".join(bitmap) + + [x, y, l, d] = [int(p) for p in props["BBX"].split()] + [dx, dy] = [int(p) for p in props["DWIDTH"].split()] + + bbox = (dx, dy), (l, -d - y, x + l, -d), (0, 0, x, y) + + try: + im = Image.frombytes("1", (x, y), bitmap, "hex", "1") + except ValueError: + # deal with zero-width characters + im = Image.new("1", (x, y)) + + return id, int(props["ENCODING"]), bbox, im + + +class BdfFontFile(FontFile.FontFile): + """Font file plugin for the X11 BDF format.""" + + def __init__(self, fp): + super().__init__() + + s = fp.readline() + if s[:13] != b"STARTFONT 2.1": + raise SyntaxError("not a valid BDF file") + + props = {} + comments = [] + + while True: + s = fp.readline() + if not s or s[:13] == b"ENDPROPERTIES": + break + i = s.find(b" ") + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") + if s[:i] in [b"COMMENT", b"COPYRIGHT"]: + if s.find(b"LogicalFontDescription") < 0: + comments.append(s[i + 1 : -1].decode("ascii")) + + while True: + c = bdf_char(fp) + if not c: + break + id, ch, (xy, dst, src), im = c + if 0 <= ch < len(self.glyph): + self.glyph[ch] = xy, dst, src, im diff --git a/sbsheriff/Lib/site-packages/PIL/BlpImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/BlpImagePlugin.py new file mode 100644 index 0000000..5339977 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/BlpImagePlugin.py @@ -0,0 +1,484 @@ +""" +Blizzard Mipmap Format (.blp) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +BLP1 files, used mostly in Warcraft III, are not fully supported. +All types of BLP2 files used in World of Warcraft are supported. + +The BLP file structure consists of a header, up to 16 mipmaps of the +texture + +Texture sizes must be powers of two, though the two dimensions do +not have to be equal; 512x256 is valid, but 512x200 is not. +The first mipmap (mipmap #0) is the full size image; each subsequent +mipmap halves both dimensions. The final mipmap should be 1x1. + +BLP files come in many different flavours: +* JPEG-compressed (type == 0) - only supported for BLP1. +* RAW images (type == 1, encoding == 1). Each mipmap is stored as an + array of 8-bit values, one per pixel, left to right, top to bottom. + Each value is an index to the palette. +* DXT-compressed (type == 1, encoding == 2): +- DXT1 compression is used if alpha_encoding == 0. + - An additional alpha bit is used if alpha_depth == 1. + - DXT3 compression is used if alpha_encoding == 1. + - DXT5 compression is used if alpha_encoding == 7. +""" + +import os +import struct +from enum import IntEnum +from io import BytesIO + +from . 
import Image, ImageFile +from ._deprecate import deprecate + + +class Format(IntEnum): + JPEG = 0 + + +class Encoding(IntEnum): + UNCOMPRESSED = 1 + DXT = 2 + UNCOMPRESSED_RAW_BGRA = 3 + + +class AlphaEncoding(IntEnum): + DXT1 = 0 + DXT3 = 1 + DXT5 = 7 + + +def __getattr__(name): + for enum, prefix in { + Format: "BLP_FORMAT_", + Encoding: "BLP_ENCODING_", + AlphaEncoding: "BLP_ALPHA_ENCODING_", + }.items(): + if name.startswith(prefix): + name = name[len(prefix) :] + if name in enum.__members__: + deprecate(f"{prefix}{name}", 10, f"{enum.__name__}.{name}") + return enum[name] + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +def unpack_565(i): + return ((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3 + + +def decode_dxt1(data, alpha=False): + """ + input: one "row" of data (i.e. will produce 4*width pixels) + """ + + blocks = len(data) // 8 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + # Decode next 8-byte block. + idx = block * 8 + color0, color1, bits = struct.unpack_from("<HHI", data, idx) + + r0, g0, b0 = unpack_565(color0) + r1, g1, b1 = unpack_565(color1) + + # Decode this block into 4x4 pixels + # Accumulate the results onto our 4 row accumulators + for j in range(4): + for i in range(4): + # get next control op and generate a pixel + + control = bits & 3 + bits = bits >> 2 + + a = 0xFF + if control == 0: + r, g, b = r0, g0, b0 + elif control == 1: + r, g, b = r1, g1, b1 + elif control == 2: + if color0 > color1: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + else: + r = (r0 + r1) // 2 + g = (g0 + g1) // 2 + b = (b0 + b1) // 2 + elif control == 3: + if color0 > color1: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + else: + r, g, b, a = 0, 0, 0, 0 + + if alpha: + ret[j].extend([r, g, b, a]) + else: + ret[j].extend([r, g, b]) + + return ret + + +def decode_dxt3(data): + """ + input: one "row" of data (i.e. will produce 4*width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + idx = block * 16 + block = data[idx : idx + 16] + # Decode next 16-byte block. + bits = struct.unpack_from("<8B", block) + color0, color1 = struct.unpack_from("<HH", block, 8) + + (code,) = struct.unpack_from("<I", block, 12) + + r0, g0, b0 = unpack_565(color0) + r1, g1, b1 = unpack_565(color1) + + for j in range(4): + high = False # Do we want the higher bits? + for i in range(4): + alphacode_index = (4 * j + i) // 2 + + a = bits[alphacode_index] + if high: + high = False + a >>= 4 + else: + high = True + a &= 0xF + a *= 17 # We get a value between 0 and 15 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret
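All three DXT decoders lean on unpack_565 above, which widens a packed 16-bit 5-6-5 colour into 8-bit channels by shifting each field up to its full width. A quick sanity check with the three pure primaries, assuming unpack_565 from this file is in scope:

assert unpack_565(0xF800) == (248, 0, 0)  # five high bits set -> red
assert unpack_565(0x07E0) == (0, 252, 0)  # six middle bits set -> green
assert unpack_565(0x001F) == (0, 0, 248)  # five low bits set -> blue

The maxima stop short of 255 because the function only shifts left; it does not replicate the high bits into the vacated low positions.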
+ + +def decode_dxt5(data): + """ + input: one "row" of data (i.e. will produce 4 * width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + idx = block * 16 + block = data[idx : idx + 16] + # Decode next 16-byte block. + a0, a1 = struct.unpack_from("<BB", block) + + bits = struct.unpack_from("<6B", block, 2) + alphacode1 = bits[2] | (bits[3] << 8) | (bits[4] << 16) | (bits[5] << 24) + alphacode2 = bits[0] | (bits[1] << 8) + + color0, color1 = struct.unpack_from("<HH", block, 8) + + (code,) = struct.unpack_from("<I", block, 12) + + r0, g0, b0 = unpack_565(color0) + r1, g1, b1 = unpack_565(color1) + + for j in range(4): + for i in range(4): + # get next control op and generate a pixel + alphacode_index = 3 * (4 * j + i) + + if alphacode_index <= 12: + alphacode = (alphacode2 >> alphacode_index) & 0x07 + elif alphacode_index == 15: + alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06) + else: # alphacode_index >= 18 and alphacode_index <= 45 + alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07 + + if alphacode == 0: + a = a0 + elif alphacode == 1: + a = a1 + elif a0 > a1: + a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7 + elif alphacode == 6: + a = 0 + elif alphacode == 7: + a = 255 + else: + a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret + + +class BLPFormatError(NotImplementedError): + pass + + +def _accept(prefix): + return prefix[:4] in (b"BLP1", b"BLP2") + + +class BlpImageFile(ImageFile.ImageFile): + """ + Blizzard Mipmap Format + """ + + format = "BLP" + format_description = "Blizzard Mipmap Format" + + def _open(self): + self.magic = self.fp.read(4) + + self.fp.seek(5, os.SEEK_CUR) + (self._blp_alpha_depth,) = struct.unpack(" mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix): + return prefix[:2] == b"BM" + + +def _dib_accept(prefix): + return i32(prefix) in [12, 40, 64, 108, 124] + + +# ============================================================================= +# Image plugin for the Windows BMP format. +# ============================================================================= +class BmpImageFile(ImageFile.ImageFile): + """Image plugin for the Windows Bitmap format (BMP)""" + + # ------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + + # -------------------------------------------------- BMP Compression values + COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5} + for k, v in COMPRESSIONS.items(): + vars()[k] = v + + def _bitmap(self, header=0, offset=0): + """Read relevant info about the BMP""" + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + # read bmp header size @offset 14 (this is part of the header size) + file_info = {"header_size": i32(read(4)), "direction": -1} + + # -------------------- If requested, read header at a specific position + # read the rest of the bmp header, without its size + header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4) + + # -------------------------------------------------- IBM OS/2 Bitmap v1 + # ----- This format has different offsets because of width/height types + if file_info["header_size"] == 12: + file_info["width"] = i16(header_data, 0) + file_info["height"] = i16(header_data, 2) + file_info["planes"] = i16(header_data, 4) + file_info["bits"] = i16(header_data, 6) + file_info["compression"] = self.RAW + file_info["palette_padding"] = 3 + + # --------------------------------------------- Windows Bitmap v2 to v5 + # v3, OS/2 v2, v4, v5 + elif file_info["header_size"] in (40, 64, 108, 124): + file_info["y_flip"] = header_data[7] == 0xFF + file_info["direction"] = 1 if file_info["y_flip"] else -1 + file_info["width"] = i32(header_data, 0) + file_info["height"] = ( + i32(header_data, 4) + if not
file_info["y_flip"] + else 2**32 - i32(header_data, 4) + ) + file_info["planes"] = i16(header_data, 8) + file_info["bits"] = i16(header_data, 10) + file_info["compression"] = i32(header_data, 12) + # byte size of pixel data + file_info["data_size"] = i32(header_data, 16) + file_info["pixels_per_meter"] = ( + i32(header_data, 20), + i32(header_data, 24), + ) + file_info["colors"] = i32(header_data, 28) + file_info["palette_padding"] = 4 + self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"]) + if file_info["compression"] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate( + ["r_mask", "g_mask", "b_mask", "a_mask"] + ): + file_info[mask] = i32(header_data, 36 + idx * 4) + else: + # 40 byte headers only have the three components in the + # bitfields masks, ref: + # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx + # See also + # https://github.com/python-pillow/Pillow/issues/1293 + # There is a 4th component in the RGBQuad, in the alpha + # location, but it is listed as a reserved component, + # and it is not generally an alpha channel + file_info["a_mask"] = 0x0 + for mask in ["r_mask", "g_mask", "b_mask"]: + file_info[mask] = i32(read(4)) + file_info["rgb_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + ) + file_info["rgba_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + file_info["a_mask"], + ) + else: + raise OSError(f"Unsupported BMP header type ({file_info['header_size']})") + + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 16 + self._size = file_info["width"], file_info["height"] + + # ------- If color count was not found in the header, compute from bits + file_info["colors"] = ( + file_info["colors"] + if file_info.get("colors", 0) + else (1 << file_info["bits"]) + ) + if offset == 14 + file_info["header_size"] and file_info["bits"] <= 8: + offset += 4 * file_info["colors"] + + # ---------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None)) + if self.mode is None: + raise OSError(f"Unsupported BMP pixel depth ({file_info['bits']})") + + # ---------------- Process BMP with Bitfields compression (not palette) + decoder_name = "raw" + if file_info["compression"] == self.BITFIELDS: + SUPPORTED = { + 32: [ + (0xFF0000, 0xFF00, 0xFF, 0x0), + (0xFF000000, 0xFF0000, 0xFF00, 0x0), + (0xFF000000, 0xFF0000, 0xFF00, 0xFF), + (0xFF, 0xFF00, 0xFF0000, 0xFF000000), + (0xFF0000, 0xFF00, 0xFF, 0xFF000000), + (0x0, 0x0, 0x0, 0x0), + ], + 24: [(0xFF0000, 0xFF00, 0xFF)], + 16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)], + } + MASK_MODES = { + (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX", + (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR", + (32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)): "ABGR", + (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA", + (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA", + (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xFF0000, 0xFF00, 0xFF)): "BGR", + (16, (0xF800, 0x7E0, 0x1F)): "BGR;16", + (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15", + } + if file_info["bits"] in SUPPORTED: + if ( + file_info["bits"] == 32 + and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])] + self.mode = "RGBA" if "A" in raw_mode else self.mode + elif ( + file_info["bits"] in (24, 16) + and file_info["rgb_mask"] in 
SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])] + else: + raise OSError("Unsupported BMP bitfields layout") + else: + raise OSError("Unsupported BMP bitfields layout") + elif file_info["compression"] == self.RAW: + if file_info["bits"] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self.mode = "BGRA", "RGBA" + elif file_info["compression"] in (self.RLE8, self.RLE4): + decoder_name = "bmp_rle" + else: + raise OSError(f"Unsupported BMP compression ({file_info['compression']})") + + # --------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + + # ---------------------------------------------------- 1-bit images + if not (0 < file_info["colors"] <= 65536): + raise OSError(f"Unsupported BMP Palette size ({file_info['colors']})") + else: + padding = file_info["palette_padding"] + palette = read(padding * file_info["colors"]) + greyscale = True + indices = ( + (0, 255) + if file_info["colors"] == 2 + else list(range(file_info["colors"])) + ) + + # ----------------- Check if greyscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind * padding : ind * padding + 3] + if rgb != o8(val) * 3: + greyscale = False + + # ------- If all colors are grey, white or black, ditch palette + if greyscale: + self.mode = "1" if file_info["colors"] == 2 else "L" + raw_mode = self.mode + else: + self.mode = "P" + self.palette = ImagePalette.raw( + "BGRX" if padding == 4 else "BGR", palette + ) + + # ---------------------------- Finally set the tile data for the plugin + self.info["compression"] = file_info["compression"] + args = [raw_mode] + if decoder_name == "bmp_rle": + args.append(file_info["compression"] == self.RLE4) + else: + args.append(((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3)) + args.append(file_info["direction"]) + self.tile = [ + ( + decoder_name, + (0, 0, file_info["width"], file_info["height"]), + offset or self.fp.tell(), + tuple(args), + ) + ] + + def _open(self): + """Open file, check magic number and read header""" + # read 14 bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if not _accept(head_data): + raise SyntaxError("Not a BMP file") + # read the start position of the BMP image data (u32) + offset = i32(head_data, 10) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +class BmpRleDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer): + rle4 = self.args[1] + data = bytearray() + x = 0 + while len(data) < self.state.xsize * self.state.ysize: + pixels = self.fd.read(1) + byte = self.fd.read(1) + if not pixels or not byte: + break + num_pixels = pixels[0] + if num_pixels: + # encoded mode + if x + num_pixels > self.state.xsize: + # Too much data for row + num_pixels = max(0, self.state.xsize - x) + if rle4: + first_pixel = o8(byte[0] >> 4) + second_pixel = o8(byte[0] & 0x0F) + for index in range(num_pixels): + if index % 2 == 0: + data += first_pixel + else: + data += second_pixel + else: + data += byte * num_pixels + x += num_pixels + else: + if byte[0] == 0: + # end of line + while len(data) % self.state.xsize != 0: + data += b"\x00" + x = 0 + elif byte[0] == 1: + # end of bitmap + break + elif byte[0] == 2: + # delta + bytes_read = self.fd.read(2) + if len(bytes_read) < 2: + break + right, up = self.fd.read(2) + data += b"\x00" * (right + up * 
self.state.xsize) + x = len(data) % self.state.xsize + else: + # absolute mode + if rle4: + # 2 pixels per byte + byte_count = byte[0] // 2 + bytes_read = self.fd.read(byte_count) + for byte_read in bytes_read: + data += o8(byte_read >> 4) + data += o8(byte_read & 0x0F) + else: + byte_count = byte[0] + bytes_read = self.fd.read(byte_count) + data += bytes_read + if len(bytes_read) < byte_count: + break + x += byte[0] + + # align to 16-bit word boundary + if self.fd.tell() % 2 != 0: + self.fd.seek(1, os.SEEK_CUR) + rawmode = "L" if self.mode == "L" else "P" + self.set_as_raw(bytes(data), (rawmode, 0, self.args[-1])) + return -1, 0 + + +# ============================================================================= +# Image plugin for the DIB format (BMP alias) +# ============================================================================= +class DibImageFile(BmpImageFile): + + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self): + self._bitmap() + + +# +# -------------------------------------------------------------------- +# Write BMP file + + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _dib_save(im, fp, filename): + _save(im, fp, filename, False) + + +def _save(im, fp, filename, bitmap_header=True): + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError as e: + raise OSError(f"cannot write mode {im.mode} as BMP") from e + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(map(lambda x: int(x * 39.3701 + 0.5), dpi)) + + stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3) + header = 40 # or 64 for OS/2 version 2 + image = stride * im.size[1] + + if im.mode == "1": + palette = b"".join(o8(i) * 4 for i in (0, 255)) + elif im.mode == "L": + palette = b"".join(o8(i) * 4 for i in range(256)) + elif im.mode == "P": + palette = im.im.getpalette("RGB", "BGRX") + colors = len(palette) // 4 + else: + palette = None + + # bitmap header + if bitmap_header: + offset = 14 + header + colors * 4 + file_size = offset + image + if file_size > 2**32 - 1: + raise ValueError("File size is too large for the BMP format") + fp.write( + b"BM" # file type (magic) + + o32(file_size) # file size + + o32(0) # reserved + + o32(offset) # image data offset + ) + + # bitmap info header + fp.write( + o32(header) # info header size + + o32(im.size[0]) # width + + o32(im.size[1]) # height + + o16(1) # planes + + o16(bits) # depth + + o32(0) # compression (0=uncompressed) + + o32(image) # size of bitmap + + o32(ppm[0]) # resolution + + o32(ppm[1]) # resolution + + o32(colors) # colors used + + o32(colors) # colors important + ) + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if palette: + fp.write(palette) + + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))]) + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(BmpImageFile.format, BmpImageFile, _accept) +Image.register_save(BmpImageFile.format, _save) + +Image.register_extension(BmpImageFile.format, ".bmp") + +Image.register_mime(BmpImageFile.format, "image/bmp") + +Image.register_decoder("bmp_rle", BmpRleDecoder) + +Image.register_open(DibImageFile.format, DibImageFile, _dib_accept) +Image.register_save(DibImageFile.format, _dib_save) + +Image.register_extension(DibImageFile.format, ".dib") + +Image.register_mime(DibImageFile.format, "image/bmp") diff --git 
a/sbsheriff/Lib/site-packages/PIL/BufrStubImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/BufrStubImagePlugin.py new file mode 100644 index 0000000..9510f73 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/BufrStubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# BUFR stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific BUFR image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" + + +class BufrStubImageFile(ImageFile.StubImageFile): + + format = "BUFR" + format_description = "BUFR" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(4)): + raise SyntaxError("Not a BUFR file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr(_handler, "save"): + raise OSError("BUFR save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) +Image.register_save(BufrStubImageFile.format, _save) + +Image.register_extension(BufrStubImageFile.format, ".bufr") diff --git a/sbsheriff/Lib/site-packages/PIL/ContainerIO.py b/sbsheriff/Lib/site-packages/PIL/ContainerIO.py new file mode 100644 index 0000000..45e80b3 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ContainerIO.py @@ -0,0 +1,120 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a class to read from a container file +# +# History: +# 1995-06-18 fl Created +# 1995-09-07 fl Added readline(), readlines() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +import io + + +class ContainerIO: + """ + A file object that provides read access to a part of an existing + file (for example a TAR file). + """ + + def __init__(self, file, offset, length): + """ + Create file object. + + :param file: Existing file. + :param offset: Start of region, in bytes. + :param length: Size of region, in bytes. + """ + self.fh = file + self.pos = 0 + self.offset = offset + self.length = length + self.fh.seek(offset) + + ## + # Always false. + + def isatty(self): + return False + + def seek(self, offset, mode=io.SEEK_SET): + """ + Move file pointer. + + :param offset: Offset in bytes. + :param mode: Starting position. Use 0 for beginning of region, 1 + for current offset, and 2 for end of region. You cannot move + the pointer outside the defined region. + """ + if mode == 1: + self.pos = self.pos + offset + elif mode == 2: + self.pos = self.length + offset + else: + self.pos = offset + # clamp + self.pos = max(0, min(self.pos, self.length)) + self.fh.seek(self.offset + self.pos) + + def tell(self): + """ + Get current file pointer. + + :returns: Offset from start of region, in bytes. + """ + return self.pos + + def read(self, n=0): + """ + Read data. + + :param n: Number of bytes to read. 
If omitted or zero, + read until end of region. + :returns: An 8-bit string. + """ + if n: + n = min(n, self.length - self.pos) + else: + n = self.length - self.pos + if not n: # EOF + return b"" if "b" in self.fh.mode else "" + self.pos = self.pos + n + return self.fh.read(n) + + def readline(self): + """ + Read a line of text. + + :returns: An 8-bit string. + """ + s = b"" if "b" in self.fh.mode else "" + newline_character = b"\n" if "b" in self.fh.mode else "\n" + while True: + c = self.read(1) + if not c: + break + s = s + c + if c == newline_character: + break + return s + + def readlines(self): + """ + Read multiple lines of text. + + :returns: A list of 8-bit strings. + """ + lines = [] + while True: + s = self.readline() + if not s: + break + lines.append(s) + return lines diff --git a/sbsheriff/Lib/site-packages/PIL/CurImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/CurImagePlugin.py new file mode 100644 index 0000000..42af5ca --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/CurImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Cursor support for PIL +# +# notes: +# uses BmpImagePlugin.py to read the bitmap data. +# +# history: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +from . import BmpImagePlugin, Image +from ._binary import i16le as i16 +from ._binary import i32le as i32 + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:4] == b"\0\0\2\0" + + +## +# Image plugin for Windows Cursor files. + + +class CurImageFile(BmpImagePlugin.BmpImageFile): + + format = "CUR" + format_description = "Windows Cursor" + + def _open(self): + + offset = self.fp.tell() + + # check magic + s = self.fp.read(6) + if not _accept(s): + raise SyntaxError("not a CUR file") + + # pick the largest cursor in the file + m = b"" + for i in range(i16(s, 4)): + s = self.fp.read(16) + if not m: + m = s + elif s[0] > m[0] and s[1] > m[1]: + m = s + if not m: + raise TypeError("No cursors were found") + + # load as bitmap + self._bitmap(i32(m, 12) + offset) + + # patch up the bitmap height + self._size = self.size[0], self.size[1] // 2 + d, e, o, a = self.tile[0] + self.tile[0] = d, (0, 0) + self.size, o, a + + return + + +# +# -------------------------------------------------------------------- + +Image.register_open(CurImageFile.format, CurImageFile, _accept) + +Image.register_extension(CurImageFile.format, ".cur") diff --git a/sbsheriff/Lib/site-packages/PIL/DcxImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/DcxImagePlugin.py new file mode 100644 index 0000000..aeed1e7 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/DcxImagePlugin.py @@ -0,0 +1,80 @@ +# +# The Python Imaging Library. +# $Id$ +# +# DCX file handling +# +# DCX is a container file format defined by Intel, commonly used +# for fax applications. Each DCX file consists of a directory +# (a list of file offsets) followed by a set of (usually 1-bit) +# PCX files. +# +# History: +# 1995-09-09 fl Created +# 1996-03-20 fl Properly derived from PcxImageFile. +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2002-07-30 fl Fixed file handling +# +# Copyright (c) 1997-98 by Secret Labs AB. +# Copyright (c) 1995-96 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from . 
import Image +from ._binary import i32le as i32 +from .PcxImagePlugin import PcxImageFile + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == MAGIC + + +## +# Image plugin for the Intel DCX format. + + +class DcxImageFile(PcxImageFile): + + format = "DCX" + format_description = "Intel DCX" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # Header + s = self.fp.read(4) + if not _accept(s): + raise SyntaxError("not a DCX file") + + # Component directory + self._offset = [] + for i in range(1024): + offset = i32(self.fp.read(4)) + if not offset: + break + self._offset.append(offset) + + self._fp = self.fp + self.frame = None + self.n_frames = len(self._offset) + self.is_animated = self.n_frames > 1 + self.seek(0) + + def seek(self, frame): + if not self._seek_check(frame): + return + self.frame = frame + self.fp = self._fp + self.fp.seek(self._offset[frame]) + PcxImageFile._open(self) + + def tell(self): + return self.frame + + +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) + +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/sbsheriff/Lib/site-packages/PIL/DdsImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/DdsImagePlugin.py new file mode 100644 index 0000000..eea6e31 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/DdsImagePlugin.py @@ -0,0 +1,267 @@ +""" +A Pillow loader for .dds files (S3TC-compressed aka DXTC) +Jerome Leclanche + +Documentation: + https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ +""" + +import struct +from io import BytesIO + +from . 
import Image, ImageFile +from ._binary import o32le as o32 + +# Magic ("DDS ") +DDS_MAGIC = 0x20534444 + +# DDS flags +DDSD_CAPS = 0x1 +DDSD_HEIGHT = 0x2 +DDSD_WIDTH = 0x4 +DDSD_PITCH = 0x8 +DDSD_PIXELFORMAT = 0x1000 +DDSD_MIPMAPCOUNT = 0x20000 +DDSD_LINEARSIZE = 0x80000 +DDSD_DEPTH = 0x800000 + +# DDS caps +DDSCAPS_COMPLEX = 0x8 +DDSCAPS_TEXTURE = 0x1000 +DDSCAPS_MIPMAP = 0x400000 + +DDSCAPS2_CUBEMAP = 0x200 +DDSCAPS2_CUBEMAP_POSITIVEX = 0x400 +DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800 +DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000 +DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000 +DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000 +DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000 +DDSCAPS2_VOLUME = 0x200000 + +# Pixel Format +DDPF_ALPHAPIXELS = 0x1 +DDPF_ALPHA = 0x2 +DDPF_FOURCC = 0x4 +DDPF_PALETTEINDEXED8 = 0x20 +DDPF_RGB = 0x40 +DDPF_LUMINANCE = 0x20000 + + +# dds.h + +DDS_FOURCC = DDPF_FOURCC +DDS_RGB = DDPF_RGB +DDS_RGBA = DDPF_RGB | DDPF_ALPHAPIXELS +DDS_LUMINANCE = DDPF_LUMINANCE +DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS +DDS_ALPHA = DDPF_ALPHA +DDS_PAL8 = DDPF_PALETTEINDEXED8 + +DDS_HEADER_FLAGS_TEXTURE = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT +DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT +DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH +DDS_HEADER_FLAGS_PITCH = DDSD_PITCH +DDS_HEADER_FLAGS_LINEARSIZE = DDSD_LINEARSIZE + +DDS_HEIGHT = DDSD_HEIGHT +DDS_WIDTH = DDSD_WIDTH + +DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS_TEXTURE +DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS_COMPLEX | DDSCAPS_MIPMAP +DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS_COMPLEX + +DDS_CUBEMAP_POSITIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX +DDS_CUBEMAP_NEGATIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEX +DDS_CUBEMAP_POSITIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEY +DDS_CUBEMAP_NEGATIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEY +DDS_CUBEMAP_POSITIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEZ +DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEZ + + +# DXT1 +DXT1_FOURCC = 0x31545844 + +# DXT3 +DXT3_FOURCC = 0x33545844 + +# DXT5 +DXT5_FOURCC = 0x35545844 + + +# dxgiformat.h + +DXGI_FORMAT_R8G8B8A8_TYPELESS = 27 +DXGI_FORMAT_R8G8B8A8_UNORM = 28 +DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = 29 +DXGI_FORMAT_BC5_TYPELESS = 82 +DXGI_FORMAT_BC5_UNORM = 83 +DXGI_FORMAT_BC5_SNORM = 84 +DXGI_FORMAT_BC6H_UF16 = 95 +DXGI_FORMAT_BC6H_SF16 = 96 +DXGI_FORMAT_BC7_TYPELESS = 97 +DXGI_FORMAT_BC7_UNORM = 98 +DXGI_FORMAT_BC7_UNORM_SRGB = 99 + + +class DdsImageFile(ImageFile.ImageFile): + format = "DDS" + format_description = "DirectDraw Surface" + + def _open(self): + if not _accept(self.fp.read(4)): + raise SyntaxError("not a DDS file") + (header_size,) = struct.unpack(" 0: + s = fp.read(min(lengthfile, 100 * 1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) + + device = "pngalpha" if transparency else "ppmraw" + + # Build Ghostscript command + command = [ + "gs", + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dBATCH", # exit after processing + "-dNOPAUSE", # don't pause between pages + "-dSAFER", # safe mode + f"-sDEVICE={device}", + f"-sOutputFile={outfile}", # output file + # adjust for image origin + "-c", + f"{-bbox[0]} {-bbox[1]} translate", + "-f", + infile, # input file + # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272) + "-c", + "showpage", + ] + + if gs_windows_binary is not None: + if not gs_windows_binary: + raise OSError("Unable to locate Ghostscript on paths") + command[0] = gs_windows_binary + + # push data through Ghostscript + try: 
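+ # On Windows, suppress the console window that the Ghostscript subprocess would otherwise flash on screen.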
+ startupinfo = None + if sys.platform.startswith("win"): + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + subprocess.check_call(command, startupinfo=startupinfo) + out_im = Image.open(outfile) + out_im.load() + finally: + try: + os.unlink(outfile) + if infile_temp: + os.unlink(infile_temp) + except OSError: + pass + + im = out_im.im.copy() + out_im.close() + return im + + +class PSFile: + """ + Wrapper for bytesio object that treats either CR or LF as end of line. + """ + + def __init__(self, fp): + self.fp = fp + self.char = None + + def seek(self, offset, whence=io.SEEK_SET): + self.char = None + self.fp.seek(offset, whence) + + def readline(self): + s = [self.char or b""] + self.char = None + + c = self.fp.read(1) + while (c not in b"\r\n") and len(c): + s.append(c) + c = self.fp.read(1) + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return b"".join(s).decode("latin-1") + + +def _accept(prefix): + return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + + +## +# Image plugin for Encapsulated PostScript. This plugin supports only +# a few variants of this format. + + +class EpsImageFile(ImageFile.ImageFile): + """EPS File Parser for the Python Imaging Library""" + + format = "EPS" + format_description = "Encapsulated Postscript" + + mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"} + + def _open(self): + (length, offset) = self._find_offset(self.fp) + + # Rewrap the open file pointer in something that will + # convert line endings and decode to latin-1. + fp = PSFile(self.fp) + + # go to offset - start of "%!PS" + fp.seek(offset) + + box = None + + self.mode = "RGB" + self._size = 1, 1 # FIXME: huh? + + # + # Load EPS header + + s_raw = fp.readline() + s = s_raw.strip("\r\n") + + while s_raw: + if s: + if len(s) > 255: + raise SyntaxError("not an EPS file") + + try: + m = split.match(s) + except re.error as e: + raise SyntaxError("not an EPS file") from e + + if m: + k, v = m.group(1, 2) + self.info[k] = v + if k == "BoundingBox": + try: + # Note: The DSC spec says that BoundingBox + # fields should be integers, but some drivers + # put floating point values there anyway. + box = [int(float(i)) for i in v.split()] + self._size = box[2] - box[0], box[3] - box[1] + self.tile = [ + ("eps", (0, 0) + self.size, offset, (length, box)) + ] + except Exception: + pass + + else: + m = field.match(s) + if m: + k = m.group(1) + + if k == "EndComments": + break + if k[:8] == "PS-Adobe": + self.info[k[:8]] = k[9:] + else: + self.info[k] = "" + elif s[0] == "%": + # handle non-DSC PostScript comments that some + # tools mistakenly put in the Comments section + pass + else: + raise OSError("bad EPS header") + + s_raw = fp.readline() + s = s_raw.strip("\r\n") + + if s and s[:1] != "%": + break + + # + # Scan for an "ImageData" descriptor + + while s[:1] == "%": + + if len(s) > 255: + raise SyntaxError("not an EPS file") + + if s[:11] == "%ImageData:": + # Encoded bitmapped image. 
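+ # The descriptor has the form "%ImageData: width height bits-per-sample mode ...", so only the first four whitespace-separated fields are needed; mode indexes mode_map above.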
+ x, y, bi, mo = s[11:].split(None, 7)[:4] + + if int(bi) == 1: + self.mode = "1" + elif int(bi) == 8: + try: + self.mode = self.mode_map[int(mo)] + except ValueError: + break + else: + break + + self._size = int(x), int(y) + return + + s = fp.readline().strip("\r\n") + if not s: + break + + if not box: + raise OSError("cannot determine EPS bounding box") + + def _find_offset(self, fp): + + s = fp.read(160) + + if s[:4] == b"%!PS": + # for HEAD without binary preview + fp.seek(0, io.SEEK_END) + length = fp.tell() + offset = 0 + elif i32(s, 0) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + offset = i32(s, 4) + length = i32(s, 8) + else: + raise SyntaxError("not an EPS file") + + return length, offset + + def load(self, scale=1, transparency=False): + # Load EPS via Ghostscript + if self.tile: + self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency) + self.mode = self.im.mode + self._size = self.im.size + self.tile = [] + return Image.Image.load(self) + + def load_seek(self, *args, **kwargs): + # we can't incrementally load, so force ImageFile.parser to + # use our custom load method by defining this method. + pass + + +# +# -------------------------------------------------------------------- + + +def _save(im, fp, filename, eps=1): + """EPS Writer for the Python Imaging Library.""" + + # + # make sure image data is available + im.load() + + # + # determine PostScript image mode + if im.mode == "L": + operator = (8, 1, b"image") + elif im.mode == "RGB": + operator = (8, 3, b"false 3 colorimage") + elif im.mode == "CMYK": + operator = (8, 4, b"false 4 colorimage") + else: + raise ValueError("image mode is not supported") + + if eps: + # + # write EPS header + fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n") + fp.write(b"%%Creator: PIL 0.1 EpsEncode\n") + # fp.write("%%CreationDate: %s"...) + fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size) + fp.write(b"%%Pages: 1\n") + fp.write(b"%%EndComments\n") + fp.write(b"%%Page: 1 1\n") + fp.write(b"%%ImageData: %d %d " % im.size) + fp.write(b'%d %d 0 1 1 "%s"\n' % operator) + + # + # image header + fp.write(b"gsave\n") + fp.write(b"10 dict begin\n") + fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1])) + fp.write(b"%d %d scale\n" % im.size) + fp.write(b"%d %d 8\n" % im.size) # <= bits + fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) + fp.write(b"{ currentfile buf readhexstring pop } bind\n") + fp.write(operator[2] + b"\n") + if hasattr(fp, "flush"): + fp.flush() + + ImageFile._save(im, fp, [("eps", (0, 0) + im.size, 0, None)]) + + fp.write(b"\n%%%%EndBinary\n") + fp.write(b"grestore end\n") + if hasattr(fp, "flush"): + fp.flush() + + +# +# -------------------------------------------------------------------- + + +Image.register_open(EpsImageFile.format, EpsImageFile, _accept) + +Image.register_save(EpsImageFile.format, _save) + +Image.register_extensions(EpsImageFile.format, [".ps", ".eps"]) + +Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/sbsheriff/Lib/site-packages/PIL/ExifTags.py b/sbsheriff/Lib/site-packages/PIL/ExifTags.py new file mode 100644 index 0000000..f3a73bf --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ExifTags.py @@ -0,0 +1,340 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# EXIF tags +# +# Copyright (c) 2003 by Secret Labs AB +# +# See the README file for information on usage and redistribution. +# + +""" +This module provides constants and clear-text names for various +well-known EXIF tags. +""" + +from enum import IntEnum + + +class Base(IntEnum): + # possibly incomplete + InteropIndex = 0x0001 + ProcessingSoftware = 0x000B + NewSubfileType = 0x00FE + SubfileType = 0x00FF + ImageWidth = 0x0100 + ImageLength = 0x0101 + BitsPerSample = 0x0102 + Compression = 0x0103 + PhotometricInterpretation = 0x0106 + Thresholding = 0x0107 + CellWidth = 0x0108 + CellLength = 0x0109 + FillOrder = 0x010A + DocumentName = 0x010D + ImageDescription = 0x010E + Make = 0x010F + Model = 0x0110 + StripOffsets = 0x0111 + Orientation = 0x0112 + SamplesPerPixel = 0x0115 + RowsPerStrip = 0x0116 + StripByteCounts = 0x0117 + MinSampleValue = 0x0118 + MaxSampleValue = 0x0119 + XResolution = 0x011A + YResolution = 0x011B + PlanarConfiguration = 0x011C + PageName = 0x011D + FreeOffsets = 0x0120 + FreeByteCounts = 0x0121 + GrayResponseUnit = 0x0122 + GrayResponseCurve = 0x0123 + T4Options = 0x0124 + T6Options = 0x0125 + ResolutionUnit = 0x0128 + PageNumber = 0x0129 + TransferFunction = 0x012D + Software = 0x0131 + DateTime = 0x0132 + Artist = 0x013B + HostComputer = 0x013C + Predictor = 0x013D + WhitePoint = 0x013E + PrimaryChromaticities = 0x013F + ColorMap = 0x0140 + HalftoneHints = 0x0141 + TileWidth = 0x0142 + TileLength = 0x0143 + TileOffsets = 0x0144 + TileByteCounts = 0x0145 + SubIFDs = 0x014A + InkSet = 0x014C + InkNames = 0x014D + NumberOfInks = 0x014E + DotRange = 0x0150 + TargetPrinter = 0x0151 + ExtraSamples = 0x0152 + SampleFormat = 0x0153 + SMinSampleValue = 0x0154 + SMaxSampleValue = 0x0155 + TransferRange = 0x0156 + ClipPath = 0x0157 + XClipPathUnits = 0x0158 + YClipPathUnits = 0x0159 + Indexed = 0x015A + JPEGTables = 0x015B + OPIProxy = 0x015F + JPEGProc = 0x0200 + JpegIFOffset = 0x0201 + JpegIFByteCount = 0x0202 + JpegRestartInterval = 0x0203 + JpegLosslessPredictors = 0x0205 + JpegPointTransforms = 0x0206 + JpegQTables = 0x0207 + JpegDCTables = 0x0208 + JpegACTables = 0x0209 + YCbCrCoefficients = 0x0211 + YCbCrSubSampling = 0x0212 + YCbCrPositioning = 0x0213 + ReferenceBlackWhite = 0x0214 + XMLPacket = 0x02BC + RelatedImageFileFormat = 0x1000 + RelatedImageWidth = 0x1001 + RelatedImageLength = 0x1002 + Rating = 0x4746 + RatingPercent = 0x4749 + ImageID = 0x800D + CFARepeatPatternDim = 0x828D + BatteryLevel = 0x828F + Copyright = 0x8298 + ExposureTime = 0x829A + FNumber = 0x829D + IPTCNAA = 0x83BB + ImageResources = 0x8649 + ExifOffset = 0x8769 + InterColorProfile = 0x8773 + ExposureProgram = 0x8822 + SpectralSensitivity = 0x8824 + GPSInfo = 0x8825 + ISOSpeedRatings = 0x8827 + OECF = 0x8828 + Interlace = 0x8829 + TimeZoneOffset = 0x882A + SelfTimerMode = 0x882B + SensitivityType = 0x8830 + StandardOutputSensitivity = 0x8831 + RecommendedExposureIndex = 0x8832 + ISOSpeed = 0x8833 + ISOSpeedLatitudeyyy = 0x8834 + ISOSpeedLatitudezzz = 0x8835 + ExifVersion = 0x9000 + DateTimeOriginal = 0x9003 + DateTimeDigitized = 0x9004 + OffsetTime = 0x9010 + OffsetTimeOriginal = 0x9011 + OffsetTimeDigitized = 0x9012 + ComponentsConfiguration = 0x9101 + CompressedBitsPerPixel = 0x9102 + ShutterSpeedValue = 0x9201 + ApertureValue = 0x9202 + BrightnessValue = 0x9203 + ExposureBiasValue = 0x9204 + MaxApertureValue = 0x9205 + SubjectDistance = 0x9206 + MeteringMode = 0x9207 + LightSource = 0x9208 + Flash = 0x9209 + FocalLength = 0x920A + Noise = 0x920D + ImageNumber = 0x9211 + 
SecurityClassification = 0x9212 + ImageHistory = 0x9213 + TIFFEPStandardID = 0x9216 + MakerNote = 0x927C + UserComment = 0x9286 + SubsecTime = 0x9290 + SubsecTimeOriginal = 0x9291 + SubsecTimeDigitized = 0x9292 + AmbientTemperature = 0x9400 + Humidity = 0x9401 + Pressure = 0x9402 + WaterDepth = 0x9403 + Acceleration = 0x9404 + CameraElevationAngle = 0x9405 + XPTitle = 0x9C9B + XPComment = 0x9C9C + XPAuthor = 0x9C9D + XPKeywords = 0x9C9E + XPSubject = 0x9C9F + FlashPixVersion = 0xA000 + ColorSpace = 0xA001 + ExifImageWidth = 0xA002 + ExifImageHeight = 0xA003 + RelatedSoundFile = 0xA004 + ExifInteroperabilityOffset = 0xA005 + FlashEnergy = 0xA20B + SpatialFrequencyResponse = 0xA20C + FocalPlaneXResolution = 0xA20E + FocalPlaneYResolution = 0xA20F + FocalPlaneResolutionUnit = 0xA210 + SubjectLocation = 0xA214 + ExposureIndex = 0xA215 + SensingMethod = 0xA217 + FileSource = 0xA300 + SceneType = 0xA301 + CFAPattern = 0xA302 + CustomRendered = 0xA401 + ExposureMode = 0xA402 + WhiteBalance = 0xA403 + DigitalZoomRatio = 0xA404 + FocalLengthIn35mmFilm = 0xA405 + SceneCaptureType = 0xA406 + GainControl = 0xA407 + Contrast = 0xA408 + Saturation = 0xA409 + Sharpness = 0xA40A + DeviceSettingDescription = 0xA40B + SubjectDistanceRange = 0xA40C + ImageUniqueID = 0xA420 + CameraOwnerName = 0xA430 + BodySerialNumber = 0xA431 + LensSpecification = 0xA432 + LensMake = 0xA433 + LensModel = 0xA434 + LensSerialNumber = 0xA435 + CompositeImage = 0xA460 + CompositeImageCount = 0xA461 + CompositeImageExposureTimes = 0xA462 + Gamma = 0xA500 + PrintImageMatching = 0xC4A5 + DNGVersion = 0xC612 + DNGBackwardVersion = 0xC613 + UniqueCameraModel = 0xC614 + LocalizedCameraModel = 0xC615 + CFAPlaneColor = 0xC616 + CFALayout = 0xC617 + LinearizationTable = 0xC618 + BlackLevelRepeatDim = 0xC619 + BlackLevel = 0xC61A + BlackLevelDeltaH = 0xC61B + BlackLevelDeltaV = 0xC61C + WhiteLevel = 0xC61D + DefaultScale = 0xC61E + DefaultCropOrigin = 0xC61F + DefaultCropSize = 0xC620 + ColorMatrix1 = 0xC621 + ColorMatrix2 = 0xC622 + CameraCalibration1 = 0xC623 + CameraCalibration2 = 0xC624 + ReductionMatrix1 = 0xC625 + ReductionMatrix2 = 0xC626 + AnalogBalance = 0xC627 + AsShotNeutral = 0xC628 + AsShotWhiteXY = 0xC629 + BaselineExposure = 0xC62A + BaselineNoise = 0xC62B + BaselineSharpness = 0xC62C + BayerGreenSplit = 0xC62D + LinearResponseLimit = 0xC62E + CameraSerialNumber = 0xC62F + LensInfo = 0xC630 + ChromaBlurRadius = 0xC631 + AntiAliasStrength = 0xC632 + ShadowScale = 0xC633 + DNGPrivateData = 0xC634 + MakerNoteSafety = 0xC635 + CalibrationIlluminant1 = 0xC65A + CalibrationIlluminant2 = 0xC65B + BestQualityScale = 0xC65C + RawDataUniqueID = 0xC65D + OriginalRawFileName = 0xC68B + OriginalRawFileData = 0xC68C + ActiveArea = 0xC68D + MaskedAreas = 0xC68E + AsShotICCProfile = 0xC68F + AsShotPreProfileMatrix = 0xC690 + CurrentICCProfile = 0xC691 + CurrentPreProfileMatrix = 0xC692 + ColorimetricReference = 0xC6BF + CameraCalibrationSignature = 0xC6F3 + ProfileCalibrationSignature = 0xC6F4 + AsShotProfileName = 0xC6F6 + NoiseReductionApplied = 0xC6F7 + ProfileName = 0xC6F8 + ProfileHueSatMapDims = 0xC6F9 + ProfileHueSatMapData1 = 0xC6FA + ProfileHueSatMapData2 = 0xC6FB + ProfileToneCurve = 0xC6FC + ProfileEmbedPolicy = 0xC6FD + ProfileCopyright = 0xC6FE + ForwardMatrix1 = 0xC714 + ForwardMatrix2 = 0xC715 + PreviewApplicationName = 0xC716 + PreviewApplicationVersion = 0xC717 + PreviewSettingsName = 0xC718 + PreviewSettingsDigest = 0xC719 + PreviewColorSpace = 0xC71A + PreviewDateTime = 0xC71B + RawImageDigest = 0xC71C + 
OriginalRawFileDigest = 0xC71D + SubTileBlockSize = 0xC71E + RowInterleaveFactor = 0xC71F + ProfileLookTableDims = 0xC725 + ProfileLookTableData = 0xC726 + OpcodeList1 = 0xC740 + OpcodeList2 = 0xC741 + OpcodeList3 = 0xC74E + NoiseProfile = 0xC761 + + +"""Maps EXIF tags to tag names.""" +TAGS = { + **{i.value: i.name for i in Base}, + 0x920C: "SpatialFrequencyResponse", + 0x9214: "SubjectLocation", + 0x9215: "ExposureIndex", + 0x828E: "CFAPattern", + 0x920B: "FlashEnergy", + 0x9216: "TIFF/EPStandardID", +} + + +class GPS(IntEnum): + GPSVersionID = 0 + GPSLatitudeRef = 1 + GPSLatitude = 2 + GPSLongitudeRef = 3 + GPSLongitude = 4 + GPSAltitudeRef = 5 + GPSAltitude = 6 + GPSTimeStamp = 7 + GPSSatellites = 8 + GPSStatus = 9 + GPSMeasureMode = 10 + GPSDOP = 11 + GPSSpeedRef = 12 + GPSSpeed = 13 + GPSTrackRef = 14 + GPSTrack = 15 + GPSImgDirectionRef = 16 + GPSImgDirection = 17 + GPSMapDatum = 18 + GPSDestLatitudeRef = 19 + GPSDestLatitude = 20 + GPSDestLongitudeRef = 21 + GPSDestLongitude = 22 + GPSDestBearingRef = 23 + GPSDestBearing = 24 + GPSDestDistanceRef = 25 + GPSDestDistance = 26 + GPSProcessingMethod = 27 + GPSAreaInformation = 28 + GPSDateStamp = 29 + GPSDifferential = 30 + GPSHPositioningError = 31 + + +"""Maps EXIF GPS tags to tag names.""" +GPSTAGS = {i.value: i.name for i in GPS} diff --git a/sbsheriff/Lib/site-packages/PIL/FitsImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/FitsImagePlugin.py new file mode 100644 index 0000000..c16300e --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/FitsImagePlugin.py @@ -0,0 +1,71 @@ +# +# The Python Imaging Library +# $Id$ +# +# FITS file handling +# +# Copyright (c) 1998-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import math + +from . import Image, ImageFile + + +def _accept(prefix): + return prefix[:6] == b"SIMPLE" + + +class FitsImageFile(ImageFile.ImageFile): + + format = "FITS" + format_description = "FITS" + + def _open(self): + headers = {} + while True: + header = self.fp.read(80) + if not header: + raise OSError("Truncated FITS file") + keyword = header[:8].strip() + if keyword == b"END": + break + value = header[8:].strip() + if value.startswith(b"="): + value = value[1:].strip() + if not headers and (not _accept(keyword) or value != b"T"): + raise SyntaxError("Not a FITS file") + headers[keyword] = value + + naxis = int(headers[b"NAXIS"]) + if naxis == 0: + raise ValueError("No image data") + elif naxis == 1: + self._size = 1, int(headers[b"NAXIS1"]) + else: + self._size = int(headers[b"NAXIS1"]), int(headers[b"NAXIS2"]) + + number_of_bits = int(headers[b"BITPIX"]) + if number_of_bits == 8: + self.mode = "L" + elif number_of_bits == 16: + self.mode = "I" + # rawmode = "I;16S" + elif number_of_bits == 32: + self.mode = "I" + elif number_of_bits in (-32, -64): + self.mode = "F" + # rawmode = "F" if number_of_bits == -32 else "F;64F" + + offset = math.ceil(self.fp.tell() / 2880) * 2880 + self.tile = [("raw", (0, 0) + self.size, offset, (self.mode, 0, -1))] + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(FitsImageFile.format, FitsImageFile, _accept) + +Image.register_extensions(FitsImageFile.format, [".fit", ".fits"]) diff --git a/sbsheriff/Lib/site-packages/PIL/FitsStubImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/FitsStubImagePlugin.py new file mode 100644 index 0000000..440240a --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/FitsStubImagePlugin.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library +# $Id$ +# 
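A minimal usage sketch for the EXIF tag tables defined above ("photo.jpg" is a placeholder path): Image.getexif() yields numeric tag IDs, which TAGS and GPSTAGS translate into the clear-text names.

from PIL import Image
from PIL.ExifTags import GPSTAGS, TAGS

with Image.open("photo.jpg") as im:          # placeholder file name
    exif = im.getexif()

for tag_id, value in exif.items():
    # fall back to the raw hex ID for tags missing from the table
    print(TAGS.get(tag_id, hex(tag_id)), value)

# GPS data lives in its own IFD, keyed by the GPS enum values
gps_ifd = exif.get_ifd(0x8825)               # Base.GPSInfo
for tag_id, value in gps_ifd.items():
    print(GPSTAGS.get(tag_id, tag_id), value)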
+# FITS stub adapter +# +# Copyright (c) 1998-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import FitsImagePlugin, Image, ImageFile +from ._deprecate import deprecate + +_handler = None + + +def register_handler(handler): + """ + Install application-specific FITS image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + deprecate( + "FitsStubImagePlugin", + 10, + action="FITS images can now be read without " + "a handler through FitsImagePlugin instead", + ) + + # Override FitsImagePlugin with this handler + # for backwards compatibility + try: + Image.ID.remove(FITSStubImageFile.format) + except ValueError: + pass + + Image.register_open( + FITSStubImageFile.format, FITSStubImageFile, FitsImagePlugin._accept + ) + + +class FITSStubImageFile(ImageFile.StubImageFile): + + format = FitsImagePlugin.FitsImageFile.format + format_description = FitsImagePlugin.FitsImageFile.format_description + + def _open(self): + offset = self.fp.tell() + + im = FitsImagePlugin.FitsImageFile(self.fp) + self._size = im.size + self.mode = im.mode + self.tile = [] + + self.fp.seek(offset) + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + raise OSError("FITS save handler not installed") + + +# -------------------------------------------------------------------- +# Registry + +Image.register_save(FITSStubImageFile.format, _save) diff --git a/sbsheriff/Lib/site-packages/PIL/FliImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/FliImagePlugin.py new file mode 100644 index 0000000..908bed9 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/FliImagePlugin.py @@ -0,0 +1,171 @@ +# +# The Python Imaging Library. +# $Id$ +# +# FLI/FLC file handling. +# +# History: +# 95-09-01 fl Created +# 97-01-03 fl Fixed parser, setup decoder tile +# 98-07-15 fl Renamed offset attribute to avoid name clash +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + +import os + +from . import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import i32le as i32 +from ._binary import o8 + +# +# decoder + + +def _accept(prefix): + return ( + len(prefix) >= 6 + and i16(prefix, 4) in [0xAF11, 0xAF12] + and i16(prefix, 14) in [0, 3] # flags + ) + + +## +# Image plugin for the FLI/FLC animation format. Use the seek +# method to load individual frames. 
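As the comment above says, individual FLI/FLC frames are reached with seek(); a minimal sketch of what that looks like ("animation.fli" is a placeholder path):

from PIL import Image

with Image.open("animation.fli") as im:      # placeholder file name
    print(im.n_frames, "frames,", im.info["duration"], "ms per frame")
    for frame in range(im.n_frames):
        im.seek(frame)                       # frames must be decoded in order
        im.convert("RGB").save(f"frame_{frame:03d}.png")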
+ + +class FliImageFile(ImageFile.ImageFile): + + format = "FLI" + format_description = "Autodesk FLI/FLC Animation" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # HEAD + s = self.fp.read(128) + if not (_accept(s) and s[20:22] == b"\x00\x00"): + raise SyntaxError("not an FLI/FLC file") + + # frames + self.n_frames = i16(s, 6) + self.is_animated = self.n_frames > 1 + + # image characteristics + self.mode = "P" + self._size = i16(s, 8), i16(s, 10) + + # animation speed + duration = i32(s, 16) + magic = i16(s, 4) + if magic == 0xAF11: + duration = (duration * 1000) // 70 + self.info["duration"] = duration + + # look for palette + palette = [(a, a, a) for a in range(256)] + + s = self.fp.read(16) + + self.__offset = 128 + + if i16(s, 4) == 0xF100: + # prefix chunk; ignore it + self.__offset = self.__offset + i32(s) + s = self.fp.read(16) + + if i16(s, 4) == 0xF1FA: + # look for palette chunk + number_of_subchunks = i16(s, 6) + chunk_size = None + for _ in range(number_of_subchunks): + if chunk_size is not None: + self.fp.seek(chunk_size - 6, os.SEEK_CUR) + s = self.fp.read(6) + chunk_type = i16(s, 4) + if chunk_type in (4, 11): + self._palette(palette, 2 if chunk_type == 11 else 0) + break + chunk_size = i32(s) + if not chunk_size: + break + + palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette] + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + # set things up to decode first frame + self.__frame = -1 + self._fp = self.fp + self.__rewind = self.fp.tell() + self.seek(0) + + def _palette(self, palette, shift): + # load palette + + i = 0 + for e in range(i16(self.fp.read(2))): + s = self.fp.read(2) + i = i + s[0] + n = s[1] + if n == 0: + n = 256 + s = self.fp.read(n * 3) + for n in range(0, len(s), 3): + r = s[n] << shift + g = s[n + 1] << shift + b = s[n + 2] << shift + palette[i] = (r, g, b) + i += 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0) + + for f in range(self.__frame + 1, frame + 1): + self._seek(f) + + def _seek(self, frame): + if frame == 0: + self.__frame = -1 + self._fp.seek(self.__rewind) + self.__offset = 128 + else: + # ensure that the previous frame was loaded + self.load() + + if frame != self.__frame + 1: + raise ValueError(f"cannot seek to frame {frame}") + self.__frame = frame + + # move to next frame + self.fp = self._fp + self.fp.seek(self.__offset) + + s = self.fp.read(4) + if not s: + raise EOFError + + framesize = i32(s) + + self.decodermaxblock = framesize + self.tile = [("fli", (0, 0) + self.size, self.__offset, None)] + + self.__offset += framesize + + def tell(self): + return self.__frame + + +# +# registry + +Image.register_open(FliImageFile.format, FliImageFile, _accept) + +Image.register_extensions(FliImageFile.format, [".fli", ".flc"]) diff --git a/sbsheriff/Lib/site-packages/PIL/FontFile.py b/sbsheriff/Lib/site-packages/PIL/FontFile.py new file mode 100644 index 0000000..c5fc80b --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/FontFile.py @@ -0,0 +1,111 @@ +# +# The Python Imaging Library +# $Id$ +# +# base class for raster font file parsers +# +# history: +# 1997-06-05 fl created +# 1997-08-19 fl restrict image width +# +# Copyright (c) 1997-1998 by Secret Labs AB +# Copyright (c) 1997-1998 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +import os + +from . 
import Image, _binary + +WIDTH = 800 + + +def puti16(fp, values): + """Write network order (big-endian) 16-bit sequence""" + for v in values: + if v < 0: + v += 65536 + fp.write(_binary.o16be(v)) + + +class FontFile: + """Base class for raster font file handlers.""" + + bitmap = None + + def __init__(self): + + self.info = {} + self.glyph = [None] * 256 + + def __getitem__(self, ix): + return self.glyph[ix] + + def compile(self): + """Create metrics and bitmap""" + + if self.bitmap: + return + + # create bitmap large enough to hold all data + h = w = maxwidth = 0 + lines = 1 + for glyph in self: + if glyph: + d, dst, src, im = glyph + h = max(h, src[3] - src[1]) + w = w + (src[2] - src[0]) + if w > WIDTH: + lines += 1 + w = src[2] - src[0] + maxwidth = max(maxwidth, w) + + xsize = maxwidth + ysize = lines * h + + if xsize == 0 and ysize == 0: + return "" + + self.ysize = h + + # paste glyphs into bitmap + self.bitmap = Image.new("1", (xsize, ysize)) + self.metrics = [None] * 256 + x = y = 0 + for i in range(256): + glyph = self[i] + if glyph: + d, dst, src, im = glyph + xx = src[2] - src[0] + # yy = src[3] - src[1] + x0, y0 = x, y + x = x + xx + if x > WIDTH: + x, y = 0, y + h + x0, y0 = x, y + x = xx + s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 + self.bitmap.paste(im.crop(src), s) + self.metrics[i] = d, dst, s + + def save(self, filename): + """Save font""" + + self.compile() + + # font data + self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") + + # font metrics + with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp: + fp.write(b"PILfont\n") + fp.write(f";;;;;;{self.ysize};\n".encode("ascii")) # HACK!!! + fp.write(b"DATA\n") + for id in range(256): + m = self.metrics[id] + if not m: + puti16(fp, [0] * 10) + else: + puti16(fp, m[0] + m[1] + m[2]) diff --git a/sbsheriff/Lib/site-packages/PIL/FpxImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/FpxImagePlugin.py new file mode 100644 index 0000000..a55376d --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/FpxImagePlugin.py @@ -0,0 +1,245 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library. +# $Id$ +# +# FlashPix support for PIL +# +# History: +# 97-01-25 fl Created (reads uncompressed RGB images only) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# +import olefile + +from . import Image, ImageFile +from ._binary import i32le as i32 + +# we map from colour field tuples to (mode, rawmode) descriptors +MODES = { + # opacity + (0x00007FFE,): ("A", "L"), + # monochrome + (0x00010000,): ("L", "L"), + (0x00018000, 0x00017FFE): ("RGBA", "LA"), + # photo YCC + (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"), + # standard RGB (NIFRGB) + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"), +} + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for the FlashPix images. 
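Assuming olefile is installed, a FlashPix file opens through the normal Image.open() path; a minimal sketch ("sample.fpx" is a placeholder path):

from PIL import Image

with Image.open("sample.fpx") as im:         # placeholder file name
    # _open() parses the OLE directory; load() then pulls the tiles
    # from the "Subimage 0000 Data" stream of the highest resolution
    print(im.format, im.mode, im.size)
    im.load()
    im.convert("RGB").save("sample.png")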
+ + +class FpxImageFile(ImageFile.ImageFile): + + format = "FPX" + format_description = "FlashPix" + + def _open(self): + # + # read the OLE directory and see if this is a likely + # to be a FlashPix file + + try: + self.ole = olefile.OleFileIO(self.fp) + except OSError as e: + raise SyntaxError("not an FPX file; invalid OLE file") from e + + if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": + raise SyntaxError("not an FPX file; bad root CLSID") + + self._open_index(1) + + def _open_index(self, index=1): + # + # get the Image Contents Property Set + + prop = self.ole.getproperties( + [f"Data Object Store {index:06d}", "\005Image Contents"] + ) + + # size (highest resolution) + + self._size = prop[0x1000002], prop[0x1000003] + + size = max(self.size) + i = 1 + while size > 64: + size = size / 2 + i += 1 + self.maxid = i - 1 + + # mode. instead of using a single field for this, flashpix + # requires you to specify the mode for each channel in each + # resolution subimage, and leaves it to the decoder to make + # sure that they all match. for now, we'll cheat and assume + # that this is always the case. + + id = self.maxid << 16 + + s = prop[0x2000002 | id] + + colors = [] + bands = i32(s, 4) + if bands > 4: + raise OSError("Invalid number of bands") + for i in range(bands): + # note: for now, we ignore the "uncalibrated" flag + colors.append(i32(s, 8 + i * 4) & 0x7FFFFFFF) + + self.mode, self.rawmode = MODES[tuple(colors)] + + # load JPEG tables, if any + self.jpeg = {} + for i in range(256): + id = 0x3000001 | (i << 16) + if id in prop: + self.jpeg[i] = prop[id] + + self._open_subimage(1, self.maxid) + + def _open_subimage(self, index=1, subimage=0): + # + # setup tile descriptors for a given subimage + + stream = [ + f"Data Object Store {index:06d}", + f"Resolution {subimage:04d}", + "Subimage 0000 Header", + ] + + fp = self.ole.openstream(stream) + + # skip prefix + fp.read(28) + + # header stream + s = fp.read(36) + + size = i32(s, 4), i32(s, 8) + # tilecount = i32(s, 12) + tilesize = i32(s, 16), i32(s, 20) + # channels = i32(s, 24) + offset = i32(s, 28) + length = i32(s, 32) + + if size != self.size: + raise OSError("subimage mismatch") + + # get tile descriptors + fp.seek(28 + offset) + s = fp.read(i32(s, 12) * length) + + x = y = 0 + xsize, ysize = size + xtile, ytile = tilesize + self.tile = [] + + for i in range(0, len(s), length): + + x1 = min(xsize, x + xtile) + y1 = min(ysize, y + ytile) + + compression = i32(s, i + 8) + + if compression == 0: + self.tile.append( + ( + "raw", + (x, y, x1, y1), + i32(s, i) + 28, + (self.rawmode,), + ) + ) + + elif compression == 1: + + # FIXME: the fill decoder is not implemented + self.tile.append( + ( + "fill", + (x, y, x1, y1), + i32(s, i) + 28, + (self.rawmode, s[12:16]), + ) + ) + + elif compression == 2: + + internal_color_conversion = s[14] + jpeg_tables = s[15] + rawmode = self.rawmode + + if internal_color_conversion: + # The image is stored as usual (usually YCbCr). + if rawmode == "RGBA": + # For "RGBA", data is stored as YCbCrA based on + # negative RGB. The following trick works around + # this problem : + jpegmode, rawmode = "YCbCrK", "CMYK" + else: + jpegmode = None # let the decoder decide + + else: + # The image is stored as defined by rawmode + jpegmode = rawmode + + self.tile.append( + ( + "jpeg", + (x, y, x1, y1), + i32(s, i) + 28, + (rawmode, jpegmode), + ) + ) + + # FIXME: jpeg tables are tile dependent; the prefix + # data must be placed in the tile descriptor itself! 
+ + if jpeg_tables: + self.tile_prefix = self.jpeg[jpeg_tables] + + else: + raise OSError("unknown/invalid compression") + + x = x + xtile + if x >= xsize: + x, y = 0, y + ytile + if y >= ysize: + break # isn't really required + + self.stream = stream + self.fp = None + + def load(self): + + if not self.fp: + self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"]) + + return ImageFile.ImageFile.load(self) + + +# +# -------------------------------------------------------------------- + + +Image.register_open(FpxImageFile.format, FpxImageFile, _accept) + +Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/sbsheriff/Lib/site-packages/PIL/FtexImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/FtexImagePlugin.py new file mode 100644 index 0000000..1b714eb --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/FtexImagePlugin.py @@ -0,0 +1,122 @@ +""" +A Pillow loader for .ftc and .ftu files (FTEX) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001 + +The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a +packed custom format called FTEX. This file format uses file extensions FTC +and FTU. +* FTC files are compressed textures (using standard texture compression). +* FTU files are not compressed. +Texture File Format +The FTC and FTU texture files both use the same format. This +has the following structure: +{header} +{format_directory} +{data} +Where: +{header} = { + u32:magic, + u32:version, + u32:width, + u32:height, + u32:mipmap_count, + u32:format_count +} + +* The "magic" number is "FTEX". +* "width" and "height" are the dimensions of the texture. +* "mipmap_count" is the number of mipmaps in the texture. +* "format_count" is the number of texture formats (different versions of the +same texture) in this file. + +{format_directory} = format_count * { u32:format, u32:where } + +The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB +uncompressed textures. +The texture data for a format starts at the position "where" in the file. + +Each set of texture data in the file has the following structure: +{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } } +* "mipmap_size" is the number of bytes in that mip level. For compressed +textures this is the size of the texture data compressed with DXT1. For 24 bit +uncompressed textures, this is 3 * width * height. Following this are the image +bytes for that mipmap level. + +Note: All data is stored in little-Endian (Intel) byte order. +""" + +import struct +from enum import IntEnum +from io import BytesIO + +from . 
import Image, ImageFile +from ._deprecate import deprecate + +MAGIC = b"FTEX" + + +class Format(IntEnum): + DXT1 = 0 + UNCOMPRESSED = 1 + + +def __getattr__(name): + for enum, prefix in {Format: "FORMAT_"}.items(): + if name.startswith(prefix): + name = name[len(prefix) :] + if name in enum.__members__: + deprecate(f"{prefix}{name}", 10, f"{enum.__name__}.{name}") + return enum[name] + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +class FtexImageFile(ImageFile.ImageFile): + format = "FTEX" + format_description = "Texture File Format (IW2:EOC)" + + def _open(self): + if not _accept(self.fp.read(4)): + raise SyntaxError("not an FTEX file") + struct.unpack("= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2) + + +## +# Image plugin for the GIMP brush format. + + +class GbrImageFile(ImageFile.ImageFile): + + format = "GBR" + format_description = "GIMP brush file" + + def _open(self): + header_size = i32(self.fp.read(4)) + if header_size < 20: + raise SyntaxError("not a GIMP brush") + version = i32(self.fp.read(4)) + if version not in (1, 2): + raise SyntaxError(f"Unsupported GIMP brush version: {version}") + + width = i32(self.fp.read(4)) + height = i32(self.fp.read(4)) + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0: + raise SyntaxError("not a GIMP brush") + if color_depth not in (1, 4): + raise SyntaxError(f"Unsupported GIMP brush color depth: {color_depth}") + + if version == 1: + comment_length = header_size - 20 + else: + comment_length = header_size - 28 + magic_number = self.fp.read(4) + if magic_number != b"GIMP": + raise SyntaxError("not a GIMP brush, bad magic number") + self.info["spacing"] = i32(self.fp.read(4)) + + comment = self.fp.read(comment_length)[:-1] + + if color_depth == 1: + self.mode = "L" + else: + self.mode = "RGBA" + + self._size = width, height + + self.info["comment"] = comment + + # Image might not be small + Image._decompression_bomb_check(self.size) + + # Data is an uncompressed block of w * h * bytes/pixel + self._data_size = width * height * color_depth + + def load(self): + if not self.im: + self.im = Image.core.new(self.mode, self.size) + self.frombytes(self.fp.read(self._data_size)) + return Image.Image.load(self) + + +# +# registry + + +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/sbsheriff/Lib/site-packages/PIL/GdImageFile.py b/sbsheriff/Lib/site-packages/PIL/GdImageFile.py new file mode 100644 index 0000000..1ac3b67 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/GdImageFile.py @@ -0,0 +1,95 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GD file handling +# +# History: +# 1996-04-12 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +""" +.. note:: + This format cannot be automatically recognized, so the + class is not registered for use with :py:func:`PIL.Image.open()`. To open a + gd file, use the :py:func:`PIL.GdImageFile.open()` function instead. + +.. warning:: + THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This + implementation is provided for convenience and demonstrational + purposes only. +""" + + +from . import ImageFile, ImagePalette, UnidentifiedImageError +from ._binary import i16be as i16 +from ._binary import i32be as i32 + + +class GdImageFile(ImageFile.ImageFile): + """ + Image plugin for the GD uncompressed format. 
Note that this format + is not supported by the standard :py:func:`PIL.Image.open()` function. To use + this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and + use the :py:func:`PIL.GdImageFile.open()` function. + """ + + format = "GD" + format_description = "GD uncompressed images" + + def _open(self): + + # Header + s = self.fp.read(1037) + + if not i16(s) in [65534, 65535]: + raise SyntaxError("Not a valid GD 2.x .gd file") + + self.mode = "L" # FIXME: "P" + self._size = i16(s, 2), i16(s, 4) + + true_color = s[6] + true_color_offset = 2 if true_color else 0 + + # transparency index + tindex = i32(s, 7 + true_color_offset) + if tindex < 256: + self.info["transparency"] = tindex + + self.palette = ImagePalette.raw( + "XBGR", s[7 + true_color_offset + 4 : 7 + true_color_offset + 4 + 256 * 4] + ) + + self.tile = [ + ( + "raw", + (0, 0) + self.size, + 7 + true_color_offset + 4 + 256 * 4, + ("L", 0, 1), + ) + ] + + +def open(fp, mode="r"): + """ + Load texture from a GD image file. + + :param fp: GD file name, or an opened file handle. + :param mode: Optional mode. In this version, if the mode argument + is given, it must be "r". + :returns: An image instance. + :raises OSError: If the image could not be read. + """ + if mode != "r": + raise ValueError("bad mode") + + try: + return GdImageFile(fp) + except SyntaxError as e: + raise UnidentifiedImageError("cannot identify this image file") from e diff --git a/sbsheriff/Lib/site-packages/PIL/GifImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/GifImagePlugin.py new file mode 100644 index 0000000..dd1b21f --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/GifImagePlugin.py @@ -0,0 +1,1062 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import itertools +import math +import os +import subprocess +from enum import IntEnum + +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + + +class LoadingStrategy(IntEnum): + """.. versionadded:: 9.1.0""" + + RGB_AFTER_FIRST = 0 + RGB_AFTER_DIFFERENT_PALETTE_ONLY = 1 + RGB_ALWAYS = 2 + + +#: .. versionadded:: 9.1.0 +LOADING_STRATEGY = LoadingStrategy.RGB_AFTER_FIRST + +# -------------------------------------------------------------------- +# Identify/read GIF files + + +def _accept(prefix): + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. 
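Before the plugin class itself, a sketch of how the LOADING_STRATEGY switch above is meant to be used ("animation.gif" is a placeholder path):

from PIL import GifImagePlugin, Image, ImageSequence

# decode every frame to RGB/RGBA regardless of palette changes (Pillow >= 9.1)
GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_ALWAYS

with Image.open("animation.gif") as im:      # placeholder file name
    for i, frame in enumerate(ImageSequence.Iterator(im)):
        print(i, frame.mode, frame.info.get("duration"))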
+ + +class GifImageFile(ImageFile.ImageFile): + + format = "GIF" + format_description = "Compuserve GIF" + _close_exclusive_fp_after_loading = False + + global_palette = None + + def data(self): + s = self.fp.read(1) + if s and s[0]: + return self.fp.read(s[0]) + return None + + def _is_palette_needed(self, p): + for i in range(0, len(p), 3): + if not (i // 3 == p[i] == p[i + 1] == p[i + 2]): + return True + return False + + def _open(self): + + # Screen + s = self.fp.read(13) + if not _accept(s): + raise SyntaxError("not a GIF file") + + self.info["version"] = s[:6] + self._size = i16(s, 6), i16(s, 8) + self.tile = [] + flags = s[10] + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = s[11] + # check if palette contains colour indices + p = self.fp.read(3 << bits) + if self._is_palette_needed(p): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + + self._fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1, False) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + if self._n_frames is not None: + self._is_animated = self._n_frames != 1 + else: + current = self.tell() + if current: + self._is_animated = True + else: + try: + self._seek(1, False) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self.im = None + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError as e: + self.seek(last_frame) + raise EOFError("no more images in GIF file") from e + + def _seek(self, frame, update_image=True): + + if frame == 0: + # rewind + self.__offset = 0 + self.dispose = None + self.__frame = -1 + self._fp.seek(self.__rewind) + self.disposal_method = 0 + if "comment" in self.info: + del self.info["comment"] + else: + # ensure that the previous frame was loaded + if self.tile and update_image: + self.load() + + if frame != self.__frame + 1: + raise ValueError(f"cannot seek to frame {frame}") + + self.fp = self._fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + s = self.fp.read(1) + if not s or s == b";": + raise EOFError + + palette = None + + info = {} + frame_transparency = None + interlace = None + frame_dispose_extent = None + while True: + + if not s: + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if s[0] == 249: + # + # graphic control extension + # + flags = block[0] + if flags & 1: + frame_transparency = block[3] + info["duration"] = i16(block, 1) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + if dispose_bits: + # only set the dispose if it is not + # unspecified. 
I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif s[0] == 254: + # + # comment extension + # + comment = b"" + + # Read this comment block + while block: + comment += block + block = self.data() + + if "comment" in info: + # If multiple comment blocks in frame, separate with \n + info["comment"] += b"\n" + comment + else: + info["comment"] = comment + s = None + continue + elif s[0] == 255 and frame == 0: + # + # application extension + # + info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if len(block) >= 3 and block[0] == 1: + self.info["loop"] = i16(block, 1) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s, 0), i16(s, 2) + x1, y1 = x0 + i16(s, 4), y0 + i16(s, 6) + if (x1 > self.size[0] or y1 > self.size[1]) and update_image: + self._size = max(x1, self.size[0]), max(y1, self.size[1]) + Image._decompression_bomb_check(self._size) + frame_dispose_extent = x0, y0, x1, y1 + flags = s[8] + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + p = self.fp.read(3 << bits) + if self._is_palette_needed(p): + palette = ImagePalette.raw("RGB", p) + else: + palette = False + + # image data + bits = self.fp.read(1)[0] + self.__offset = self.fp.tell() + break + + else: + pass + # raise OSError, "illegal GIF tag `%x`" % s[0] + s = None + + if interlace is None: + # self._fp = None + raise EOFError + + self.__frame = frame + if not update_image: + return + + self.tile = [] + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + self._frame_palette = palette if palette is not None else self.global_palette + self._frame_transparency = frame_transparency + if frame == 0: + if self._frame_palette: + if LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS: + self.mode = "RGBA" if frame_transparency is not None else "RGB" + else: + self.mode = "P" + else: + self.mode = "L" + + if not palette and self.global_palette: + from copy import copy + + palette = copy(self.global_palette) + self.palette = palette + else: + if self.mode == "P": + if ( + LOADING_STRATEGY != LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY + or palette + ): + self.pyaccess = None + if "transparency" in self.info: + self.im.putpalettealpha(self.info["transparency"], 0) + self.im = self.im.convert("RGBA", Image.Dither.FLOYDSTEINBERG) + self.mode = "RGBA" + del self.info["transparency"] + else: + self.mode = "RGB" + self.im = self.im.convert("RGB", Image.Dither.FLOYDSTEINBERG) + + def _rgb(color): + if self._frame_palette: + color = tuple(self._frame_palette.palette[color * 3 : color * 3 + 3]) + else: + color = (color, color, color) + return color + + self.dispose_extent = frame_dispose_extent + try: + if self.disposal_method < 2: + # do not dispose or none specified + self.dispose = None + elif self.disposal_method == 2: + # replace with background colour + + # only dispose the extent in this frame + x0, y0, x1, y1 = self.dispose_extent + dispose_size = (x1 - x0, y1 - y0) + + Image._decompression_bomb_check(dispose_size) + + # by convention, attempt to use transparency first + dispose_mode = "P" + color = self.info.get("transparency", frame_transparency) + if color is not None: + if self.mode in ("RGB", "RGBA"): + dispose_mode = "RGBA" + color = _rgb(color) + (0,) + else: + color = self.info.get("background", 0) + if self.mode in ("RGB", "RGBA"): + dispose_mode = 
"RGB" + color = _rgb(color) + self.dispose = Image.core.fill(dispose_mode, dispose_size, color) + else: + # replace with previous contents + if self.im is not None: + # only dispose the extent in this frame + self.dispose = self._crop(self.im, self.dispose_extent) + elif frame_transparency is not None: + x0, y0, x1, y1 = self.dispose_extent + dispose_size = (x1 - x0, y1 - y0) + + Image._decompression_bomb_check(dispose_size) + dispose_mode = "P" + color = frame_transparency + if self.mode in ("RGB", "RGBA"): + dispose_mode = "RGBA" + color = _rgb(frame_transparency) + (0,) + self.dispose = Image.core.fill(dispose_mode, dispose_size, color) + except AttributeError: + pass + + if interlace is not None: + transparency = -1 + if frame_transparency is not None: + if frame == 0: + if LOADING_STRATEGY != LoadingStrategy.RGB_ALWAYS: + self.info["transparency"] = frame_transparency + elif self.mode not in ("RGB", "RGBA"): + transparency = frame_transparency + self.tile = [ + ( + "gif", + (x0, y0, x1, y1), + self.__offset, + (bits, interlace, transparency), + ) + ] + + if info.get("comment"): + self.info["comment"] = info["comment"] + for k in ["duration", "extension"]: + if k in info: + self.info[k] = info[k] + elif k in self.info: + del self.info[k] + + def load_prepare(self): + temp_mode = "P" if self._frame_palette else "L" + self._prev_im = None + if self.__frame == 0: + if self._frame_transparency is not None: + self.im = Image.core.fill( + temp_mode, self.size, self._frame_transparency + ) + elif self.mode in ("RGB", "RGBA"): + self._prev_im = self.im + if self._frame_palette: + self.im = Image.core.fill("P", self.size, self._frame_transparency or 0) + self.im.putpalette(*self._frame_palette.getdata()) + else: + self.im = None + self.mode = temp_mode + self._frame_palette = None + + super().load_prepare() + + def load_end(self): + if self.__frame == 0: + if self.mode == "P" and LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS: + if self._frame_transparency is not None: + self.im.putpalettealpha(self._frame_transparency, 0) + self.mode = "RGBA" + else: + self.mode = "RGB" + self.im = self.im.convert(self.mode, Image.Dither.FLOYDSTEINBERG) + return + if not self._prev_im: + return + if self._frame_transparency is not None: + self.im.putpalettealpha(self._frame_transparency, 0) + frame_im = self.im.convert("RGBA") + else: + frame_im = self.im.convert("RGB") + frame_im = self._crop(frame_im, self.dispose_extent) + + self.im = self._prev_im + self.mode = self.im.mode + if frame_im.mode == "RGBA": + self.im.paste(frame_im, self.dispose_extent, frame_im) + else: + self.im.paste(frame_im, self.dispose_extent) + + def tell(self): + return self.__frame + + +# -------------------------------------------------------------------- +# Write GIF files + + +RAWMODE = {"1": "L", "L": "L", "P": "P"} + + +def _normalize_mode(im): + """ + Takes an image (or frame), returns an image in a mode that is appropriate + for saving in a Gif. + + It may return the original image, or it may return an image converted to + palette or 'L' mode. + + :param im: Image object + :returns: Image object + """ + if im.mode in RAWMODE: + im.load() + return im + if Image.getmodebase(im.mode) == "RGB": + im = im.convert("P", palette=Image.Palette.ADAPTIVE) + if im.palette.mode == "RGBA": + for rgba in im.palette.colors.keys(): + if rgba[3] == 0: + im.info["transparency"] = im.palette.colors[rgba] + break + return im + return im.convert("L") + + +def _normalize_palette(im, palette, info): + """ + Normalizes the palette for image. 
+ - Sets the palette to the incoming palette, if provided. + - Ensures that there's a palette for L mode images + - Optimizes the palette if necessary/desired. + + :param im: Image object + :param palette: bytes object containing the source palette, or .... + :param info: encoderinfo + :returns: Image object + """ + source_palette = None + if palette: + # a bytes palette + if isinstance(palette, (bytes, bytearray, list)): + source_palette = bytearray(palette[:768]) + if isinstance(palette, ImagePalette.ImagePalette): + source_palette = bytearray(palette.palette) + + if im.mode == "P": + if not source_palette: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if not source_palette: + source_palette = bytearray(i // 3 for i in range(768)) + im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette) + + if palette: + used_palette_colors = [] + for i in range(0, len(source_palette), 3): + source_color = tuple(source_palette[i : i + 3]) + index = im.palette.colors.get(source_color) + if index in used_palette_colors: + index = None + used_palette_colors.append(index) + for i, index in enumerate(used_palette_colors): + if index is None: + for j in range(len(used_palette_colors)): + if j not in used_palette_colors: + used_palette_colors[i] = j + break + im = im.remap_palette(used_palette_colors) + else: + used_palette_colors = _get_optimize(im, info) + if used_palette_colors is not None: + return im.remap_palette(used_palette_colors, source_palette) + + im.palette.palette = source_palette + return im + + +def _write_single_frame(im, fp, palette): + im_out = _normalize_mode(im) + for k, v in im_out.info.items(): + im.encoderinfo.setdefault(k, v) + im_out = _normalize_palette(im_out, palette, im.encoderinfo) + + for s in _get_global_header(im_out, im.encoderinfo): + fp.write(s) + + # local image header + flags = 0 + if get_interlace(im): + flags = flags | 64 + _write_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + +def _write_multiple_frames(im, fp, palette): + + duration = im.encoderinfo.get("duration") + disposal = im.encoderinfo.get("disposal", im.info.get("disposal")) + + im_frames = [] + frame_count = 0 + background_im = None + for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])): + for im_frame in ImageSequence.Iterator(imSequence): + # a copy is required here since seek can still mutate the image + im_frame = _normalize_mode(im_frame.copy()) + if frame_count == 0: + for k, v in im_frame.info.items(): + if k == "transparency": + continue + im.encoderinfo.setdefault(k, v) + + encoderinfo = im.encoderinfo.copy() + im_frame = _normalize_palette(im_frame, palette, encoderinfo) + if "transparency" in im_frame.info: + encoderinfo.setdefault("transparency", im_frame.info["transparency"]) + if isinstance(duration, (list, tuple)): + encoderinfo["duration"] = duration[frame_count] + elif duration is None and "duration" in im_frame.info: + encoderinfo["duration"] = im_frame.info["duration"] + if isinstance(disposal, (list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] + frame_count += 1 + + if im_frames: + # delta frame + previous = im_frames[-1] + if encoderinfo.get("disposal") == 2: + if background_im is None: + color = im.encoderinfo.get( + "transparency", im.info.get("transparency", (0, 0, 0)) + ) + background = _get_background(im_frame, color) + background_im = 
Image.new("P", im_frame.size, background) + background_im.putpalette(im_frames[0]["im"].palette) + base_im = background_im + else: + base_im = previous["im"] + if _get_palette_bytes(im_frame) == _get_palette_bytes(base_im): + delta = ImageChops.subtract_modulo(im_frame, base_im) + else: + delta = ImageChops.subtract_modulo( + im_frame.convert("RGB"), base_im.convert("RGB") + ) + bbox = delta.getbbox() + if not bbox: + # This frame is identical to the previous frame + if duration: + previous["encoderinfo"]["duration"] += encoderinfo["duration"] + continue + else: + bbox = None + im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo}) + + if len(im_frames) > 1: + for frame_data in im_frames: + im_frame = frame_data["im"] + if not frame_data["bbox"]: + # global header + for s in _get_global_header(im_frame, frame_data["encoderinfo"]): + fp.write(s) + offset = (0, 0) + else: + # compress difference + if not palette: + frame_data["encoderinfo"]["include_color_table"] = True + + im_frame = im_frame.crop(frame_data["bbox"]) + offset = frame_data["bbox"][:2] + _write_frame_data(fp, im_frame, offset, frame_data["encoderinfo"]) + return True + elif "duration" in im.encoderinfo and isinstance( + im.encoderinfo["duration"], (list, tuple) + ): + # Since multiple frames will not be written, add together the frame durations + im.encoderinfo["duration"] = sum(im.encoderinfo["duration"]) + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, save_all=False): + # header + if "palette" in im.encoderinfo or "palette" in im.info: + palette = im.encoderinfo.get("palette", im.info.get("palette")) + else: + palette = None + im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) + + if not save_all or not _write_multiple_frames(im, fp, palette): + _write_single_frame(im, fp, palette) + + fp.write(b";") # end of file + + if hasattr(fp, "flush"): + fp.flush() + + +def get_interlace(im): + interlace = im.encoderinfo.get("interlace", 1) + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _write_local_header(fp, im, offset, flags): + transparent_color_exists = False + try: + if "transparency" in im.encoderinfo: + transparency = im.encoderinfo["transparency"] + else: + transparency = im.info["transparency"] + transparency = int(transparency) + except (KeyError, ValueError): + pass + else: + # optimize the block away if transparent color is not used + transparent_color_exists = True + + used_palette_colors = _get_optimize(im, im.encoderinfo) + if used_palette_colors is not None: + # adjust the transparency index after optimize + try: + transparency = used_palette_colors.index(transparency) + except ValueError: + transparent_color_exists = False + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + + disposal = int(im.encoderinfo.get("disposal", 0)) + + if transparent_color_exists or duration != 0 or disposal: + packed_flag = 1 if transparent_color_exists else 0 + packed_flag |= disposal << 2 + if not transparent_color_exists: + transparency = 0 + + fp.write( + b"!" 
+ + o8(249) # extension intro + + o8(4) # length + + o8(packed_flag) # packed fields + + o16(duration) # duration + + o8(transparency) # transparency index + + o8(0) + ) + + include_color_table = im.encoderinfo.get("include_color_table") + if include_color_table: + palette_bytes = _get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + if color_table_size: + flags = flags | 128 # local color table flag + flags = flags | color_table_size + + fp.write( + b"," + + o16(offset[0]) # offset + + o16(offset[1]) + + o16(im.size[0]) # size + + o16(im.size[1]) + + o8(flags) # flags + ) + if include_color_table and color_table_size: + fp.write(_get_header_palette(palette_bytes)) + fp.write(o8(8)) # bits + + +def _save_netpbm(im, fp, filename): + + # Unused by default. + # To use, uncomment the register_save call at the end of the file. + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. + tempfile = im._dump() + + try: + with open(filename, "wb") as f: + if im.mode != "RGB": + subprocess.check_call( + ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL + ) + else: + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename) + quant_cmd = ["ppmquant", "256", tempfile] + togif_cmd = ["ppmtogif"] + quant_proc = subprocess.Popen( + quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL + ) + togif_proc = subprocess.Popen( + togif_cmd, + stdin=quant_proc.stdout, + stdout=f, + stderr=subprocess.DEVNULL, + ) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, togif_cmd) + finally: + try: + os.unlink(tempfile) + except OSError: + pass + + +# Force optimization so that we can test performance against +# cases where it took lots of memory and time previously. +_FORCE_OPTIMIZE = False + + +def _get_optimize(im, info): + """ + Palette optimization is a potentially expensive operation. + + This function determines if the palette should be optimized using + some heuristics, then returns the list of palette entries in use. + + :param im: Image object + :param info: encoderinfo + :returns: list of indexes of palette entries in use, or None + """ + if im.mode in ("P", "L") and info and info.get("optimize", 0): + # Potentially expensive operation. + + # The palette saves 3 bytes per color not used, but palette + # lengths are restricted to 3*(2**N) bytes. Max saving would + # be 768 -> 6 bytes if we went all the way down to 2 colors. + # * If we're over 128 colors, we can't save any space. + # * If there aren't any holes, it's not worth collapsing. + # * If we have a 'large' image, the palette is in the noise. 
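    # (worked example of the saving described above: an image that uses
    # only 2 of its 256 palette entries can shrink the stored palette from
    # 3 * 256 = 768 bytes down to 3 * 2 = 6 bytes, while anything over 128
    # colors pads back up to 3 * (2**8) = 768 bytes and saves nothing)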
+ + # create the new palette if not every color is used + optimise = _FORCE_OPTIMIZE or im.mode == "L" + if optimise or im.width * im.height < 512 * 512: + # check which colors are used + used_palette_colors = [] + for i, count in enumerate(im.histogram()): + if count: + used_palette_colors.append(i) + + if optimise or max(used_palette_colors) >= len(used_palette_colors): + return used_palette_colors + + num_palette_colors = len(im.palette.palette) // Image.getmodebands( + im.palette.mode + ) + current_palette_size = 1 << (num_palette_colors - 1).bit_length() + if ( + # check that the palette would become smaller when saved + len(used_palette_colors) <= current_palette_size // 2 + # check that the palette is not already the smallest possible size + and current_palette_size > 2 + ): + return used_palette_colors + + +def _get_color_table_size(palette_bytes): + # calculate the palette size for the header + if not palette_bytes: + return 0 + elif len(palette_bytes) < 9: + return 1 + else: + return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1 + + +def _get_header_palette(palette_bytes): + """ + Returns the palette, null padded to the next power of 2 (*3) bytes + suitable for direct inclusion in the GIF header + + :param palette_bytes: Unpadded palette bytes, in RGBRGB form + :returns: Null padded palette + """ + color_table_size = _get_color_table_size(palette_bytes) + + # add the missing amount of bytes + # the palette has to be 2<<n bytes long + actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3 + if actual_target_size_diff > 0: + palette_bytes += o8(0) * 3 * actual_target_size_diff + return palette_bytes + + +def _get_palette_bytes(im): + """ + Gets the palette for inclusion in the gif header + + :param im: Image object + :returns: Bytes, len<=768 suitable for inclusion in gif header + """ + return im.palette.palette + + +def _get_background(im, info_background): + background = 0 + if info_background: + background = info_background + if isinstance(background, tuple): + # WebPImagePlugin stores an RGBA value in info["background"] + # So it must be converted to the same format as GifImagePlugin's + # info["background"] - a global color table index + try: + background = im.palette.getcolor(background, im) + except ValueError as e: + if str(e) == "cannot allocate more than 256 colors": + # If all 256 colors are in use, + # then there is no need for the background color + return 0 + else: + raise + return background + + +def _get_global_header(im, info): + """Return a list of strings representing a GIF header""" + + # Header Block + # https://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + if im.info.get("version") == b"89a" or ( + info + and ( + "transparency" in info + or "loop" in info + or info.get("duration") + or info.get("comment") + ) + ): + version = b"89a" + + background = _get_background(im, info.get("background")) + + palette_bytes = _get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + + header = [ + b"GIF" # signature + + version # version + + o16(im.size[0]) # canvas width + + o16(im.size[1]), # canvas height + # Logical Screen Descriptor + # size of global color table + global color table flag + o8(color_table_size + 128), # packed fields + # background + reserved/aspect + o8(background) + o8(0), + # Global Color Table + _get_header_palette(palette_bytes), + ] + if "loop" in info: + header.append( + b"!" + + o8(255) # extension intro + + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(info["loop"]) # number of loops + + o8(0) + ) + if info.get("comment"): + comment_block = b"!"
+ o8(254) # extension intro + + comment = info["comment"] + if isinstance(comment, str): + comment = comment.encode() + for i in range(0, len(comment), 255): + subblock = comment[i : i + 255] + comment_block += o8(len(subblock)) + subblock + + comment_block += o8(0) + header.append(comment_block) + return header + + +def _write_frame_data(fp, im_frame, offset, params): + try: + im_frame.encoderinfo = params + + # local image header + _write_local_header(fp, im_frame, offset, 0) + + ImageFile._save( + im_frame, fp, [("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])] + ) + + fp.write(b"\0") # end of image data + finally: + del im_frame.encoderinfo + + +# -------------------------------------------------------------------- +# Legacy GIF utilities + + +def getheader(im, palette=None, info=None): + """ + Legacy Method to get Gif data from image. + + Warning:: May modify image data. + + :param im: Image object + :param palette: bytes object containing the source palette, or .... + :param info: encoderinfo + :returns: tuple of(list of header items, optimized palette) + + """ + used_palette_colors = _get_optimize(im, info) + + if info is None: + info = {} + + if "background" not in info and "background" in im.info: + info["background"] = im.info["background"] + + im_mod = _normalize_palette(im, palette, info) + im.palette = im_mod.palette + im.im = im_mod.im + header = _get_global_header(im, info) + + return header, used_palette_colors + + +def getdata(im, offset=(0, 0), **params): + """ + Legacy Method + + Return a list of strings representing this image. + The first string is a local image header, the rest contains + encoded image data. + + To specify duration, add the time in milliseconds, + e.g. ``getdata(im_frame, duration=1000)`` + + :param im: Image object + :param offset: Tuple of (x, y) pixels. Defaults to (0, 0) + :param \\**params: e.g. duration or other encoder info parameters + :returns: List of bytes containing GIF encoded frame data + + """ + + class Collector: + data = [] + + def write(self, data): + self.data.append(data) + + im.load() # make sure raster data is available + + fp = Collector() + + _write_frame_data(fp, im, offset, params) + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GifImageFile.format, GifImageFile, _accept) +Image.register_save(GifImageFile.format, _save) +Image.register_save_all(GifImageFile.format, _save_all) +Image.register_extension(GifImageFile.format, ".gif") +Image.register_mime(GifImageFile.format, "image/gif") + +# +# Uncomment the following line if you wish to use NETPBM/PBMPLUS +# instead of the built-in "uncompressed" GIF encoder + +# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/sbsheriff/Lib/site-packages/PIL/GimpGradientFile.py b/sbsheriff/Lib/site-packages/PIL/GimpGradientFile.py new file mode 100644 index 0000000..7ab7f99 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/GimpGradientFile.py @@ -0,0 +1,140 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read (and render) GIMP gradient files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +""" +Stuff to translate curve segments to palette values (derived from +the corresponding code in GIMP, written by Federico Mena Quintero. +See the GIMP distribution for more information.) 
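Stepping back to the GIF writer that ends above: it is normally driven through Image.save(). A minimal multi-frame sketch on toy data ("out.gif" and the frames are placeholders):

from PIL import Image

frames = [Image.new("RGB", (64, 64), (i * 60, 0, 0)) for i in range(4)]
frames[0].save(
    "out.gif",                 # placeholder output path
    save_all=True,             # routes through _write_multiple_frames()
    append_images=frames[1:],
    duration=200,              # per-frame delay in ms (GCE duration field)
    loop=0,                    # 0 = loop forever (NETSCAPE2.0 extension)
)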
+""" + + +from math import log, pi, sin, sqrt + +from ._binary import o8 + +EPSILON = 1e-10 +"""""" # Enable auto-doc for data member + + +def linear(middle, pos): + if pos <= middle: + if middle < EPSILON: + return 0.0 + else: + return 0.5 * pos / middle + else: + pos = pos - middle + middle = 1.0 - middle + if middle < EPSILON: + return 1.0 + else: + return 0.5 + 0.5 * pos / middle + + +def curved(middle, pos): + return pos ** (log(0.5) / log(max(middle, EPSILON))) + + +def sine(middle, pos): + return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + + +def sphere_increasing(middle, pos): + return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + + +def sphere_decreasing(middle, pos): + return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) + + +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] +"""""" # Enable auto-doc for data member + + +class GradientFile: + + gradient = None + + def getpalette(self, entries=256): + + palette = [] + + ix = 0 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + for i in range(entries): + + x = i / (entries - 1) + + while x1 < x: + ix += 1 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + w = x1 - x0 + + if w < EPSILON: + scale = segment(0.5, 0.5) + else: + scale = segment((xm - x0) / w, (x - x0) / w) + + # expand to RGBA + r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) + g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) + b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) + a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) + + # add to palette + palette.append(r + g + b + a) + + return b"".join(palette), "RGBA" + + +class GimpGradientFile(GradientFile): + """File handler for GIMP's gradient format.""" + + def __init__(self, fp): + + if fp.readline()[:13] != b"GIMP Gradient": + raise SyntaxError("not a GIMP gradient file") + + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) + + gradient = [] + + for i in range(count): + + s = fp.readline().split() + w = [float(x) for x in s[:11]] + + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] + + segment = SEGMENTS[int(s[11])] + cspace = int(s[12]) + + if cspace != 0: + raise OSError("cannot handle HSV colour space") + + gradient.append((x0, x1, xm, rgb0, rgb1, segment)) + + self.gradient = gradient diff --git a/sbsheriff/Lib/site-packages/PIL/GimpPaletteFile.py b/sbsheriff/Lib/site-packages/PIL/GimpPaletteFile.py new file mode 100644 index 0000000..4d7cfba --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/GimpPaletteFile.py @@ -0,0 +1,56 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read GIMP palette files +# +# History: +# 1997-08-23 fl Created +# 2004-09-07 fl Support GIMP 2.0 palette files. +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1997-2004. +# +# See the README file for information on usage and redistribution. 
+# + +import re + +from ._binary import o8 + + +class GimpPaletteFile: + """File handler for GIMP's palette format.""" + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [o8(i) * 3 for i in range(256)] + + if fp.readline()[:12] != b"GIMP Palette": + raise SyntaxError("not a GIMP palette file") + + for i in range(256): + + s = fp.readline() + if not s: + break + + # skip fields and comment lines + if re.match(rb"\w+:|#", s): + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = tuple(map(int, s.split()[:3])) + if len(v) != 3: + raise ValueError("bad palette entry") + + self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/sbsheriff/Lib/site-packages/PIL/GribStubImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/GribStubImagePlugin.py new file mode 100644 index 0000000..4575f82 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/GribStubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# GRIB stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific GRIB image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:4] == b"GRIB" and prefix[7] == 1 + + +class GribStubImageFile(ImageFile.StubImageFile): + + format = "GRIB" + format_description = "GRIB" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a GRIB file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr(_handler, "save"): + raise OSError("GRIB save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) +Image.register_save(GribStubImageFile.format, _save) + +Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/sbsheriff/Lib/site-packages/PIL/Hdf5StubImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/Hdf5StubImagePlugin.py new file mode 100644 index 0000000..df11cf2 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/Hdf5StubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# HDF5 stub adapter +# +# Copyright (c) 2000-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific HDF5 image handler. + + :param handler: Handler object. 
+ """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:8] == b"\x89HDF\r\n\x1a\n" + + +class HDF5StubImageFile(ImageFile.StubImageFile): + + format = "HDF5" + format_description = "HDF5" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not an HDF file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr(_handler, "save"): + raise OSError("HDF5 save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) +Image.register_save(HDF5StubImageFile.format, _save) + +Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"]) diff --git a/sbsheriff/Lib/site-packages/PIL/IcnsImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/IcnsImagePlugin.py new file mode 100644 index 0000000..fa192f0 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/IcnsImagePlugin.py @@ -0,0 +1,392 @@ +# +# The Python Imaging Library. +# $Id$ +# +# macOS icns file decoder, based on icns.py by Bob Ippolito. +# +# history: +# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. +# 2020-04-04 Allow saving on all operating systems. +# +# Copyright (c) 2004 by Bob Ippolito. +# Copyright (c) 2004 by Secret Labs. +# Copyright (c) 2004 by Fredrik Lundh. +# Copyright (c) 2014 by Alastair Houghton. +# Copyright (c) 2020 by Pan Jing. +# +# See the README file for information on usage and redistribution. +# + +import io +import os +import struct +import sys + +from PIL import Image, ImageFile, PngImagePlugin, features + +enable_jpeg2k = features.check_codec("jpg_2000") +if enable_jpeg2k: + from PIL import Jpeg2KImagePlugin + +MAGIC = b"icns" +HEADERSIZE = 8 + + +def nextheader(fobj): + return struct.unpack(">4sI", fobj.read(HEADERSIZE)) + + +def read_32t(fobj, start_length, size): + # The 128x128 icon seems to have an extra header for some reason. + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(4) + if sig != b"\x00\x00\x00\x00": + raise SyntaxError("Unknown signature, expecting 0x00000000") + return read_32(fobj, (start + 4, length - 4), size) + + +def read_32(fobj, start_length, size): + """ + Read a 32bit RGB icon resource. Seems to be either uncompressed or + an RLE packbits-like scheme. 
+ """ + (start, length) = start_length + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + if length == sizesq * 3: + # uncompressed ("RGBRGBGB") + indata = fobj.read(length) + im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) + else: + # decode image + im = Image.new("RGB", pixel_size, None) + for band_ix in range(3): + data = [] + bytesleft = sizesq + while bytesleft > 0: + byte = fobj.read(1) + if not byte: + break + byte = byte[0] + if byte & 0x80: + blocksize = byte - 125 + byte = fobj.read(1) + for i in range(blocksize): + data.append(byte) + else: + blocksize = byte + 1 + data.append(fobj.read(blocksize)) + bytesleft -= blocksize + if bytesleft <= 0: + break + if bytesleft != 0: + raise SyntaxError(f"Error reading channel [{repr(bytesleft)} left]") + band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1) + im.im.putband(band.im, band_ix) + return {"RGB": im} + + +def read_mk(fobj, start_length, size): + # Alpha masks seem to be uncompressed + start = start_length[0] + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1) + return {"A": band} + + +def read_png_or_jpeg2000(fobj, start_length, size): + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(12) + if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a": + fobj.seek(start) + im = PngImagePlugin.PngImageFile(fobj) + Image._decompression_bomb_check(im.size) + return {"RGBA": im} + elif ( + sig[:4] == b"\xff\x4f\xff\x51" + or sig[:4] == b"\x0d\x0a\x87\x0a" + or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ): + if not enable_jpeg2k: + raise ValueError( + "Unsupported icon subimage format (rebuild PIL " + "with JPEG 2000 support to fix this)" + ) + # j2k, jpc or j2c + fobj.seek(start) + jp2kstream = fobj.read(length) + f = io.BytesIO(jp2kstream) + im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) + Image._decompression_bomb_check(im.size) + if im.mode != "RGBA": + im = im.convert("RGBA") + return {"RGBA": im} + else: + raise ValueError("Unsupported icon subimage format") + + +class IcnsFile: + + SIZES = { + (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)], + (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)], + (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)], + (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)], + (128, 128, 2): [(b"ic13", read_png_or_jpeg2000)], + (128, 128, 1): [ + (b"ic07", read_png_or_jpeg2000), + (b"it32", read_32t), + (b"t8mk", read_mk), + ], + (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)], + (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)], + (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)], + (32, 32, 1): [ + (b"icp5", read_png_or_jpeg2000), + (b"il32", read_32), + (b"l8mk", read_mk), + ], + (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)], + (16, 16, 1): [ + (b"icp4", read_png_or_jpeg2000), + (b"is32", read_32), + (b"s8mk", read_mk), + ], + } + + def __init__(self, fobj): + """ + fobj is a file-like object as an icns resource + """ + # signature : (start, length) + self.dct = dct = {} + self.fobj = fobj + sig, filesize = nextheader(fobj) + if not _accept(sig): + raise SyntaxError("not an icns file") + i = HEADERSIZE + while i < filesize: + sig, blocksize = nextheader(fobj) + if blocksize <= 0: + raise SyntaxError("invalid block header") + i += HEADERSIZE + blocksize -= HEADERSIZE + dct[sig] = (i, blocksize) + fobj.seek(blocksize, io.SEEK_CUR) + i += blocksize + + def 
itersizes(self): + sizes = [] + for size, fmts in self.SIZES.items(): + for (fmt, reader) in fmts: + if fmt in self.dct: + sizes.append(size) + break + return sizes + + def bestsize(self): + sizes = self.itersizes() + if not sizes: + raise SyntaxError("No 32bit icon resources found") + return max(sizes) + + def dataforsize(self, size): + """ + Get an icon resource as {channel: array}. Note that + the arrays are bottom-up like windows bitmaps and will likely + need to be flipped or transposed in some way. + """ + dct = {} + for code, reader in self.SIZES[size]: + desc = self.dct.get(code) + if desc is not None: + dct.update(reader(self.fobj, desc, size)) + return dct + + def getimage(self, size=None): + if size is None: + size = self.bestsize() + if len(size) == 2: + size = (size[0], size[1], 1) + channels = self.dataforsize(size) + + im = channels.get("RGBA", None) + if im: + return im + + im = channels.get("RGB").copy() + try: + im.putalpha(channels["A"]) + except KeyError: + pass + return im + + +## +# Image plugin for Mac OS icons. + + +class IcnsImageFile(ImageFile.ImageFile): + """ + PIL image support for Mac OS .icns files. + Chooses the best resolution, but will possibly load + a different size image if you mutate the size attribute + before calling 'load'. + + The info dictionary has a key 'sizes' that is a list + of sizes that the icns file has. + """ + + format = "ICNS" + format_description = "Mac OS icns resource" + + def _open(self): + self.icns = IcnsFile(self.fp) + self.mode = "RGBA" + self.info["sizes"] = self.icns.itersizes() + self.best_size = self.icns.bestsize() + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + info_size = value + if info_size not in self.info["sizes"] and len(info_size) == 2: + info_size = (info_size[0], info_size[1], 1) + if ( + info_size not in self.info["sizes"] + and len(info_size) == 3 + and info_size[2] == 1 + ): + simple_sizes = [ + (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"] + ] + if value in simple_sizes: + info_size = self.info["sizes"][simple_sizes.index(value)] + if info_size not in self.info["sizes"]: + raise ValueError("This is not one of the allowed sizes of this image") + self._size = value + + def load(self): + if len(self.size) == 3: + self.best_size = self.size + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) + + px = Image.Image.load(self) + if self.im is not None and self.im.size == self.size: + # Already loaded + return px + self.load_prepare() + # This is likely NOT the best way to do it, but whatever. + im = self.icns.getimage(self.best_size) + + # If this is a PNG or JPEG 2000, it won't be loaded yet + px = im.load() + + self.im = im.im + self.mode = im.mode + self.size = im.size + + return px + + +def _save(im, fp, filename): + """ + Saves the image as a series of PNG files, + that are then combined into a .icns file. 
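+
+    Normally reached through the registered save handler, e.g.
+    ``im.save("icon.icns")`` (hypothetical filename); additional
+    resolutions may be supplied via the ``append_images`` keyword.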
+ """ + if hasattr(fp, "flush"): + fp.flush() + + sizes = { + b"ic07": 128, + b"ic08": 256, + b"ic09": 512, + b"ic10": 1024, + b"ic11": 32, + b"ic12": 64, + b"ic13": 256, + b"ic14": 512, + } + provided_images = {im.width: im for im in im.encoderinfo.get("append_images", [])} + size_streams = {} + for size in set(sizes.values()): + image = ( + provided_images[size] + if size in provided_images + else im.resize((size, size)) + ) + + temp = io.BytesIO() + image.save(temp, "png") + size_streams[size] = temp.getvalue() + + entries = [] + for type, size in sizes.items(): + stream = size_streams[size] + entries.append( + {"type": type, "size": HEADERSIZE + len(stream), "stream": stream} + ) + + # Header + fp.write(MAGIC) + file_length = HEADERSIZE # Header + file_length += HEADERSIZE + 8 * len(entries) # TOC + file_length += sum(entry["size"] for entry in entries) + fp.write(struct.pack(">i", file_length)) + + # TOC + fp.write(b"TOC ") + fp.write(struct.pack(">i", HEADERSIZE + len(entries) * HEADERSIZE)) + for entry in entries: + fp.write(entry["type"]) + fp.write(struct.pack(">i", entry["size"])) + + # Data + for entry in entries: + fp.write(entry["type"]) + fp.write(struct.pack(">i", entry["size"])) + fp.write(entry["stream"]) + + if hasattr(fp, "flush"): + fp.flush() + + +def _accept(prefix): + return prefix[:4] == MAGIC + + +Image.register_open(IcnsImageFile.format, IcnsImageFile, _accept) +Image.register_extension(IcnsImageFile.format, ".icns") + +Image.register_save(IcnsImageFile.format, _save) +Image.register_mime(IcnsImageFile.format, "image/icns") + +if __name__ == "__main__": + if len(sys.argv) < 2: + print("Syntax: python3 IcnsImagePlugin.py [file]") + sys.exit() + + with open(sys.argv[1], "rb") as fp: + imf = IcnsImageFile(fp) + for size in imf.info["sizes"]: + imf.size = size + imf.save("out-%s-%s-%s.png" % size) + with Image.open(sys.argv[1]) as im: + im.save("out.png") + if sys.platform == "windows": + os.startfile("out.png") diff --git a/sbsheriff/Lib/site-packages/PIL/IcoImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/IcoImagePlugin.py new file mode 100644 index 0000000..17b9855 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/IcoImagePlugin.py @@ -0,0 +1,355 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Icon support for PIL +# +# History: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . +# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki +# +# Icon format references: +# * https://en.wikipedia.org/wiki/ICO_(file_format) +# * https://msdn.microsoft.com/en-us/library/ms997538.aspx + + +import warnings +from io import BytesIO +from math import ceil, log + +from . 
import BmpImagePlugin, Image, ImageFile, PngImagePlugin +from ._binary import i16le as i16 +from ._binary import i32le as i32 +from ._binary import o8 +from ._binary import o16le as o16 +from ._binary import o32le as o32 + +# +# -------------------------------------------------------------------- + +_MAGIC = b"\0\0\1\0" + + +def _save(im, fp, filename): + fp.write(_MAGIC) # (2+2) + bmp = im.encoderinfo.get("bitmap_format") == "bmp" + sizes = im.encoderinfo.get( + "sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)], + ) + frames = [] + provided_ims = [im] + im.encoderinfo.get("append_images", []) + width, height = im.size + for size in sorted(set(sizes)): + if size[0] > width or size[1] > height or size[0] > 256 or size[1] > 256: + continue + + for provided_im in provided_ims: + if provided_im.size != size: + continue + frames.append(provided_im) + if bmp: + bits = BmpImagePlugin.SAVE[provided_im.mode][1] + bits_used = [bits] + for other_im in provided_ims: + if other_im.size != size: + continue + bits = BmpImagePlugin.SAVE[other_im.mode][1] + if bits not in bits_used: + # Another image has been supplied for this size + # with a different bit depth + frames.append(other_im) + bits_used.append(bits) + break + else: + # TODO: invent a more convenient method for proportional scalings + frame = provided_im.copy() + frame.thumbnail(size, Image.Resampling.LANCZOS, reducing_gap=None) + frames.append(frame) + fp.write(o16(len(frames))) # idCount(2) + offset = fp.tell() + len(frames) * 16 + for frame in frames: + width, height = frame.size + # 0 means 256 + fp.write(o8(width if width < 256 else 0)) # bWidth(1) + fp.write(o8(height if height < 256 else 0)) # bHeight(1) + + bits, colors = BmpImagePlugin.SAVE[frame.mode][1:] if bmp else (32, 0) + fp.write(o8(colors)) # bColorCount(1) + fp.write(b"\0") # bReserved(1) + fp.write(b"\0\0") # wPlanes(2) + fp.write(o16(bits)) # wBitCount(2) + + image_io = BytesIO() + if bmp: + frame.save(image_io, "dib") + + if bits != 32: + and_mask = Image.new("1", size) + ImageFile._save( + and_mask, image_io, [("raw", (0, 0) + size, 0, ("1", 0, -1))] + ) + else: + frame.save(image_io, "png") + image_io.seek(0) + image_bytes = image_io.read() + if bmp: + image_bytes = image_bytes[:8] + o32(height * 2) + image_bytes[12:] + bytes_len = len(image_bytes) + fp.write(o32(bytes_len)) # dwBytesInRes(4) + fp.write(o32(offset)) # dwImageOffset(4) + current = fp.tell() + fp.seek(offset) + fp.write(image_bytes) + offset = offset + bytes_len + fp.seek(current) + + +def _accept(prefix): + return prefix[:4] == _MAGIC + + +class IcoFile: + def __init__(self, buf): + """ + Parse image from file-like object containing ico file data + """ + + # check magic + s = buf.read(6) + if not _accept(s): + raise SyntaxError("not an ICO file") + + self.buf = buf + self.entry = [] + + # Number of items in file + self.nb_items = i16(s, 4) + + # Get headers for each item + for i in range(self.nb_items): + s = buf.read(16) + + icon_header = { + "width": s[0], + "height": s[1], + "nb_color": s[2], # No. of colors in image (0 if >=8bpp) + "reserved": s[3], + "planes": i16(s, 4), + "bpp": i16(s, 6), + "size": i32(s, 8), + "offset": i32(s, 12), + } + + # See Wikipedia + for j in ("width", "height"): + if not icon_header[j]: + icon_header[j] = 256 + + # See Wikipedia notes about color depth. 
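+            # The fallback chain below: use the bpp field when nonzero;
+            # otherwise derive depth from the palette size as
+            # ceil(log2(nb_color)); otherwise fall back to the sentinel 256.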
+ # We need this just to differ images with equal sizes + icon_header["color_depth"] = ( + icon_header["bpp"] + or ( + icon_header["nb_color"] != 0 + and ceil(log(icon_header["nb_color"], 2)) + ) + or 256 + ) + + icon_header["dim"] = (icon_header["width"], icon_header["height"]) + icon_header["square"] = icon_header["width"] * icon_header["height"] + + self.entry.append(icon_header) + + self.entry = sorted(self.entry, key=lambda x: x["color_depth"]) + # ICO images are usually squares + # self.entry = sorted(self.entry, key=lambda x: x['width']) + self.entry = sorted(self.entry, key=lambda x: x["square"]) + self.entry.reverse() + + def sizes(self): + """ + Get a list of all available icon sizes and color depths. + """ + return {(h["width"], h["height"]) for h in self.entry} + + def getentryindex(self, size, bpp=False): + for (i, h) in enumerate(self.entry): + if size == h["dim"] and (bpp is False or bpp == h["color_depth"]): + return i + return 0 + + def getimage(self, size, bpp=False): + """ + Get an image from the icon + """ + return self.frame(self.getentryindex(size, bpp)) + + def frame(self, idx): + """ + Get an image from frame idx + """ + + header = self.entry[idx] + + self.buf.seek(header["offset"]) + data = self.buf.read(8) + self.buf.seek(header["offset"]) + + if data[:8] == PngImagePlugin._MAGIC: + # png frame + im = PngImagePlugin.PngImageFile(self.buf) + Image._decompression_bomb_check(im.size) + else: + # XOR + AND mask bmp frame + im = BmpImagePlugin.DibImageFile(self.buf) + Image._decompression_bomb_check(im.size) + + # change tile dimension to only encompass XOR image + im._size = (im.size[0], int(im.size[1] / 2)) + d, e, o, a = im.tile[0] + im.tile[0] = d, (0, 0) + im.size, o, a + + # figure out where AND mask image starts + bpp = header["bpp"] + if 32 == bpp: + # 32-bit color depth icon image allows semitransparent areas + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. + + # Back up to start of bmp data + self.buf.seek(o) + # extract every 4th byte (eg. 3,7,11,15,...) + alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] + + # convert to an 8bpp grayscale image + mask = Image.frombuffer( + "L", # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + "raw", # raw decoder + ("L", 0, -1), # 8bpp inverted, unpadded, reversed + ) + else: + # get AND image from end of bitmap + w = im.size[0] + if (w % 32) > 0: + # bitmap row data is aligned to word boundaries + w += 32 - (im.size[0] % 32) + + # the total mask data is + # padded row size * height / bits per char + + total_bytes = int((w * im.size[1]) / 8) + and_mask_offset = header["offset"] + header["size"] - total_bytes + + self.buf.seek(and_mask_offset) + mask_data = self.buf.read(total_bytes) + + # convert raw data to image + mask = Image.frombuffer( + "1", # 1 bpp + im.size, # (w, h) + mask_data, # source chars + "raw", # raw decoder + ("1;I", int(w / 8), -1), # 1bpp inverted, padded, reversed + ) + + # now we have two images, im is XOR image and mask is AND image + + # apply mask image as alpha channel + im = im.convert("RGBA") + im.putalpha(mask) + + return im + + +## +# Image plugin for Windows Icon files. + + +class IcoImageFile(ImageFile.ImageFile): + """ + PIL read-only image support for Microsoft Windows .ico files. + + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. 
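+
+    A minimal sketch (``"app.ico"`` is a hypothetical filename)::
+
+        with Image.open("app.ico") as im:
+            im.size = (16, 16)  # must be one of the sizes in the file
+            im.load()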
+ + The info dictionary has a key 'sizes' that is a list of the sizes available + in the icon file. + + Handles classic, XP and Vista icon formats. + + When saving, PNG compression is used. Support for this was only added in + Windows Vista. If you are unable to view the icon in Windows, convert the + image to "RGBA" mode before saving. + + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . + https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki + """ + + format = "ICO" + format_description = "Windows Icon" + + def _open(self): + self.ico = IcoFile(self.fp) + self.info["sizes"] = self.ico.sizes() + self.size = self.ico.entry[0]["dim"] + self.load() + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + if value not in self.info["sizes"]: + raise ValueError("This is not one of the allowed sizes of this image") + self._size = value + + def load(self): + if self.im is not None and self.im.size == self.size: + # Already loaded + return Image.Image.load(self) + im = self.ico.getimage(self.size) + # if tile is PNG, it won't really be loaded yet + im.load() + self.im = im.im + self.mode = im.mode + if im.size != self.size: + warnings.warn("Image was not the expected size") + + index = self.ico.getentryindex(self.size) + sizes = list(self.info["sizes"]) + sizes[index] = im.size + self.info["sizes"] = set(sizes) + + self.size = im.size + + def load_seek(self): + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. + pass + + +# +# -------------------------------------------------------------------- + + +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") + +Image.register_mime(IcoImageFile.format, "image/x-icon") diff --git a/sbsheriff/Lib/site-packages/PIL/ImImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/ImImagePlugin.py new file mode 100644 index 0000000..31b0ff4 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImImagePlugin.py @@ -0,0 +1,373 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IFUNC IM file handling for PIL +# +# history: +# 1995-09-01 fl Created. +# 1997-01-03 fl Save palette images +# 1997-01-08 fl Added sequence support +# 1997-01-23 fl Added P and RGB save support +# 1997-05-31 fl Read floating point images +# 1997-06-22 fl Save floating point images +# 1997-08-27 fl Read and save 1-bit images +# 1998-06-25 fl Added support for RGB+LUT images +# 1998-07-02 fl Added support for YCC images +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 1998-12-29 fl Added I;16 support +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# 2003-09-26 fl Added LA/PA support +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +import os +import re + +from . 
import Image, ImageFile, ImagePalette + +# -------------------------------------------------------------------- +# Standard tags + +COMMENT = "Comment" +DATE = "Date" +EQUIPMENT = "Digitalization equipment" +FRAMES = "File size (no of images)" +LUT = "Lut" +NAME = "Name" +SCALE = "Scale (x,y)" +SIZE = "Image size (x*y)" +MODE = "Image type" + +TAGS = { + COMMENT: 0, + DATE: 0, + EQUIPMENT: 0, + FRAMES: 0, + LUT: 0, + NAME: 0, + SCALE: 0, + SIZE: 0, + MODE: 0, +} + +OPEN = { + # ifunc93/p3cfunc formats + "0 1 image": ("1", "1"), + "L 1 image": ("1", "1"), + "Greyscale image": ("L", "L"), + "Grayscale image": ("L", "L"), + "RGB image": ("RGB", "RGB;L"), + "RLB image": ("RGB", "RLB"), + "RYB image": ("RGB", "RLB"), + "B1 image": ("1", "1"), + "B2 image": ("P", "P;2"), + "B4 image": ("P", "P;4"), + "X 24 image": ("RGB", "RGB"), + "L 32 S image": ("I", "I;32"), + "L 32 F image": ("F", "F;32"), + # old p3cfunc formats + "RGB3 image": ("RGB", "RGB;T"), + "RYB3 image": ("RGB", "RYB;T"), + # extensions + "LA image": ("LA", "LA;L"), + "PA image": ("LA", "PA;L"), + "RGBA image": ("RGBA", "RGBA;L"), + "RGBX image": ("RGBX", "RGBX;L"), + "CMYK image": ("CMYK", "CMYK;L"), + "YCC image": ("YCbCr", "YCbCr;L"), +} + +# ifunc95 extensions +for i in ["8", "8S", "16", "16S", "32", "32F"]: + OPEN[f"L {i} image"] = ("F", f"F;{i}") + OPEN[f"L*{i} image"] = ("F", f"F;{i}") +for i in ["16", "16L", "16B"]: + OPEN[f"L {i} image"] = (f"I;{i}", f"I;{i}") + OPEN[f"L*{i} image"] = (f"I;{i}", f"I;{i}") +for i in ["32S"]: + OPEN[f"L {i} image"] = ("I", f"I;{i}") + OPEN[f"L*{i} image"] = ("I", f"I;{i}") +for i in range(2, 33): + OPEN[f"L*{i} image"] = ("F", f"F;{i}") + + +# -------------------------------------------------------------------- +# Read IM directory + +split = re.compile(rb"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + + +def number(s): + try: + return int(s) + except ValueError: + return float(s) + + +## +# Image plugin for the IFUNC IM file format. + + +class ImImageFile(ImageFile.ImageFile): + + format = "IM" + format_description = "IFUNC Image Memory" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # Quick rejection: if there's not an LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + n = 0 + + # Default values + self.info[MODE] = "L" + self.info[SIZE] = (512, 512) + self.info[FRAMES] = 1 + + self.rawmode = "L" + + while True: + + s = self.fp.read(1) + + # Some versions of IFUNC uses \n\r instead of \r\n... + if s == b"\r": + continue + + if not s or s == b"\0" or s == b"\x1A": + break + + # FIXME: this may read whole file if not a text file + s = s + self.fp.readline() + + if len(s) > 100: + raise SyntaxError("not an IM file") + + if s[-2:] == b"\r\n": + s = s[:-2] + elif s[-1:] == b"\n": + s = s[:-1] + + try: + m = split.match(s) + except re.error as e: + raise SyntaxError("not an IM file") from e + + if m: + + k, v = m.group(1, 2) + + # Don't know if this is the correct encoding, + # but a decent guess (I guess) + k = k.decode("latin-1", "replace") + v = v.decode("latin-1", "replace") + + # Convert value as appropriate + if k in [FRAMES, SCALE, SIZE]: + v = v.replace("*", ",") + v = tuple(map(number, v.split(","))) + if len(v) == 1: + v = v[0] + elif k == MODE and v in OPEN: + v, self.rawmode = OPEN[v] + + # Add to dictionary. Note that COMMENT tags are + # combined into a list of strings. 
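+                # e.g. two "Comment:" header lines yield
+                # info["Comment"] == [<first value>, <second value>]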
+ if k == COMMENT: + if k in self.info: + self.info[k].append(v) + else: + self.info[k] = [v] + else: + self.info[k] = v + + if k in TAGS: + n += 1 + + else: + + raise SyntaxError( + "Syntax error in IM header: " + s.decode("ascii", "replace") + ) + + if not n: + raise SyntaxError("Not an IM file") + + # Basic attributes + self._size = self.info[SIZE] + self.mode = self.info[MODE] + + # Skip forward to start of image data + while s and s[:1] != b"\x1A": + s = self.fp.read(1) + if not s: + raise SyntaxError("File truncated") + + if LUT in self.info: + # convert lookup table to palette or lut attribute + palette = self.fp.read(768) + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette + for i in range(256): + if palette[i] == palette[i + 256] == palette[i + 512]: + if palette[i] != i: + linear = 0 + else: + greyscale = 0 + if self.mode in ["L", "LA", "P", "PA"]: + if greyscale: + if not linear: + self.lut = list(palette[:256]) + else: + if self.mode in ["L", "P"]: + self.mode = self.rawmode = "P" + elif self.mode in ["LA", "PA"]: + self.mode = "PA" + self.rawmode = "PA;L" + self.palette = ImagePalette.raw("RGB;L", palette) + elif self.mode == "RGB": + if not greyscale or not linear: + self.lut = list(palette) + + self.frame = 0 + + self.__offset = offs = self.fp.tell() + + self._fp = self.fp # FIXME: hack + + if self.rawmode[:2] == "F;": + + # ifunc95 formats + try: + # use bit decoder (if necessary) + bits = int(self.rawmode[2:]) + if bits not in [8, 16, 32]: + self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))] + return + except ValueError: + pass + + if self.rawmode in ["RGB;T", "RYB;T"]: + # Old LabEye/3PC files. Would be very surprised if anyone + # ever stumbled upon such a file ;-) + size = self.size[0] * self.size[1] + self.tile = [ + ("raw", (0, 0) + self.size, offs, ("G", 0, -1)), + ("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)), + ("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)), + ] + else: + # LabEye/IFUNC files + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] + + @property + def n_frames(self): + return self.info[FRAMES] + + @property + def is_animated(self): + return self.info[FRAMES] > 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + + self.frame = frame + + if self.mode == "1": + bits = 1 + else: + bits = 8 * len(self.mode) + + size = ((self.size[0] * bits + 7) // 8) * self.size[1] + offs = self.__offset + frame * size + + self.fp = self._fp + + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] + + def tell(self): + return self.frame + + +# +# -------------------------------------------------------------------- +# Save IM files + + +SAVE = { + # mode: (im type, raw mode) + "1": ("0 1", "1"), + "L": ("Greyscale", "L"), + "LA": ("LA", "LA;L"), + "P": ("Greyscale", "P"), + "PA": ("LA", "PA;L"), + "I": ("L 32S", "I;32S"), + "I;16": ("L 16", "I;16"), + "I;16L": ("L 16L", "I;16L"), + "I;16B": ("L 16B", "I;16B"), + "F": ("L 32F", "F;32F"), + "RGB": ("RGB", "RGB;L"), + "RGBA": ("RGBA", "RGBA;L"), + "RGBX": ("RGBX", "RGBX;L"), + "CMYK": ("CMYK", "CMYK;L"), + "YCbCr": ("YCC", "YCbCr;L"), +} + + +def _save(im, fp, filename): + + try: + image_type, rawmode = SAVE[im.mode] + except KeyError as e: + raise ValueError(f"Cannot save {im.mode} images as IM") from e + + frames = im.encoderinfo.get("frames", 1) + + fp.write(f"Image type: {image_type} image\r\n".encode("ascii")) + if filename: + # Each line must be 100 characters or less, + # or: SyntaxError("not an IM 
file") + # 8 characters are used for "Name: " and "\r\n" + # Keep just the filename, ditch the potentially overlong path + name, ext = os.path.splitext(os.path.basename(filename)) + name = "".join([name[: 92 - len(ext)], ext]) + + fp.write(f"Name: {name}\r\n".encode("ascii")) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii")) + fp.write(f"File size (no of images): {frames}\r\n".encode("ascii")) + if im.mode in ["P", "PA"]: + fp.write(b"Lut: 1\r\n") + fp.write(b"\000" * (511 - fp.tell()) + b"\032") + if im.mode in ["P", "PA"]: + im_palette = im.im.getpalette("RGB", "RGB;L") + colors = len(im_palette) // 3 + palette = b"" + for i in range(3): + palette += im_palette[colors * i : colors * (i + 1)] + palette += b"\x00" * (256 - colors) + fp.write(palette) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))]) + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) + +Image.register_extension(ImImageFile.format, ".im") diff --git a/sbsheriff/Lib/site-packages/PIL/Image.py b/sbsheriff/Lib/site-packages/PIL/Image.py new file mode 100644 index 0000000..7faf0c2 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/Image.py @@ -0,0 +1,3760 @@ +# +# The Python Imaging Library. +# $Id$ +# +# the Image class wrapper +# +# partial release history: +# 1995-09-09 fl Created +# 1996-03-11 fl PIL release 0.0 (proof of concept) +# 1996-04-30 fl PIL release 0.1b1 +# 1999-07-28 fl PIL release 1.0 final +# 2000-06-07 fl PIL release 1.1 +# 2000-10-20 fl PIL release 1.1.1 +# 2001-05-07 fl PIL release 1.1.2 +# 2002-03-15 fl PIL release 1.1.3 +# 2003-05-10 fl PIL release 1.1.4 +# 2005-03-28 fl PIL release 1.1.5 +# 2006-12-02 fl PIL release 1.1.6 +# 2009-11-15 fl PIL release 1.1.7 +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import atexit +import builtins +import io +import logging +import math +import os +import re +import struct +import sys +import tempfile +import warnings +from collections.abc import Callable, MutableMapping +from enum import IntEnum +from pathlib import Path + +try: + import defusedxml.ElementTree as ElementTree +except ImportError: + ElementTree = None + +# VERSION was removed in Pillow 6.0.0. +# PILLOW_VERSION was removed in Pillow 9.0.0. +# Use __version__ instead. +from . 
import ImageMode, TiffTags, UnidentifiedImageError, __version__, _plugins +from ._binary import i32le, o32be, o32le +from ._deprecate import deprecate +from ._util import DeferredError, is_path + + +def __getattr__(name): + categories = {"NORMAL": 0, "SEQUENCE": 1, "CONTAINER": 2} + if name in categories: + deprecate("Image categories", 10, "is_animated", plural=True) + return categories[name] + elif name in ("NEAREST", "NONE"): + deprecate(name, 10, "Resampling.NEAREST or Dither.NONE") + return 0 + old_resampling = { + "LINEAR": "BILINEAR", + "CUBIC": "BICUBIC", + "ANTIALIAS": "LANCZOS", + } + if name in old_resampling: + deprecate(name, 10, f"Resampling.{old_resampling[name]}") + return Resampling[old_resampling[name]] + for enum in (Transpose, Transform, Resampling, Dither, Palette, Quantize): + if name in enum.__members__: + deprecate(name, 10, f"{enum.__name__}.{name}") + return enum[name] + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +logger = logging.getLogger(__name__) + + +class DecompressionBombWarning(RuntimeWarning): + pass + + +class DecompressionBombError(Exception): + pass + + +# Limit to around a quarter gigabyte for a 24-bit (3 bpp) image +MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 // 4 // 3) + + +try: + # If the _imaging C module is not present, Pillow will not load. + # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. + from . import _imaging as core + + if __version__ != getattr(core, "PILLOW_VERSION", None): + raise ImportError( + "The _imaging extension was built for another version of Pillow or PIL:\n" + f"Core version: {getattr(core, 'PILLOW_VERSION', None)}\n" + f"Pillow version: {__version__}" + ) + +except ImportError as v: + core = DeferredError(ImportError("The _imaging C module is not installed.")) + # Explanations for ways that we know we might have an import error + if str(v).startswith("Module use of python"): + # The _imaging C module is present, but not compiled for + # the right version (windows only). Print a warning, if + # possible. + warnings.warn( + "The _imaging extension was built for another version of Python.", + RuntimeWarning, + ) + elif str(v).startswith("The _imaging extension"): + warnings.warn(str(v), RuntimeWarning) + # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting.rst + raise + + +# works everywhere, win for pypy, not cpython +USE_CFFI_ACCESS = hasattr(sys, "pypy_version_info") +try: + import cffi +except ImportError: + cffi = None + + +def isImageType(t): + """ + Checks if an object is an image object. + + .. warning:: + + This function is for internal use only. 
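+
+    (The check is duck-typed: any object that exposes an ``im`` attribute
+    is treated as an image.)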
+ + :param t: object to check if it's an image + :returns: True if the object is an image + """ + return hasattr(t, "im") + + +# +# Constants + +# transpose +class Transpose(IntEnum): + FLIP_LEFT_RIGHT = 0 + FLIP_TOP_BOTTOM = 1 + ROTATE_90 = 2 + ROTATE_180 = 3 + ROTATE_270 = 4 + TRANSPOSE = 5 + TRANSVERSE = 6 + + +# transforms (also defined in Imaging.h) +class Transform(IntEnum): + AFFINE = 0 + EXTENT = 1 + PERSPECTIVE = 2 + QUAD = 3 + MESH = 4 + + +# resampling filters (also defined in Imaging.h) +class Resampling(IntEnum): + NEAREST = 0 + BOX = 4 + BILINEAR = 2 + HAMMING = 5 + BICUBIC = 3 + LANCZOS = 1 + + +_filters_support = { + Resampling.BOX: 0.5, + Resampling.BILINEAR: 1.0, + Resampling.HAMMING: 1.0, + Resampling.BICUBIC: 2.0, + Resampling.LANCZOS: 3.0, +} + + +# dithers +class Dither(IntEnum): + NONE = 0 + ORDERED = 1 # Not yet implemented + RASTERIZE = 2 # Not yet implemented + FLOYDSTEINBERG = 3 # default + + +# palettes/quantizers +class Palette(IntEnum): + WEB = 0 + ADAPTIVE = 1 + + +class Quantize(IntEnum): + MEDIANCUT = 0 + MAXCOVERAGE = 1 + FASTOCTREE = 2 + LIBIMAGEQUANT = 3 + + +if hasattr(core, "DEFAULT_STRATEGY"): + DEFAULT_STRATEGY = core.DEFAULT_STRATEGY + FILTERED = core.FILTERED + HUFFMAN_ONLY = core.HUFFMAN_ONLY + RLE = core.RLE + FIXED = core.FIXED + + +# -------------------------------------------------------------------- +# Registries + +ID = [] +OPEN = {} +MIME = {} +SAVE = {} +SAVE_ALL = {} +EXTENSION = {} +DECODERS = {} +ENCODERS = {} + +# -------------------------------------------------------------------- +# Modes + +_ENDIAN = "<" if sys.byteorder == "little" else ">" + + +def _conv_type_shape(im): + m = ImageMode.getmode(im.mode) + shape = (im.height, im.width) + extra = len(m.bands) + if extra != 1: + shape += (extra,) + return shape, m.typestr + + +MODES = ["1", "CMYK", "F", "HSV", "I", "L", "LAB", "P", "RGB", "RGBA", "RGBX", "YCbCr"] + +# raw modes that may be memory mapped. NOTE: if you change this, you +# may have to modify the stride calculation in map.c too! +_MAPMODES = ("L", "P", "RGBX", "RGBA", "CMYK", "I;16", "I;16L", "I;16B") + + +def getmodebase(mode): + """ + Gets the "base" mode for given mode. This function returns "L" for + images that contain grayscale data, and "RGB" for images that + contain color data. + + :param mode: Input mode. + :returns: "L" or "RGB". + :exception KeyError: If the input mode was not a standard mode. + """ + return ImageMode.getmode(mode).basemode + + +def getmodetype(mode): + """ + Gets the storage type mode. Given a mode, this function returns a + single-layer mode suitable for storing individual bands. + + :param mode: Input mode. + :returns: "L", "I", or "F". + :exception KeyError: If the input mode was not a standard mode. + """ + return ImageMode.getmode(mode).basetype + + +def getmodebandnames(mode): + """ + Gets a list of individual band names. Given a mode, this function returns + a tuple containing the names of individual bands (use + :py:method:`~PIL.Image.getmodetype` to get the mode used to store each + individual band. + + :param mode: Input mode. + :returns: A tuple containing band names. The length of the tuple + gives the number of bands in an image of the given mode. + :exception KeyError: If the input mode was not a standard mode. + """ + return ImageMode.getmode(mode).bands + + +def getmodebands(mode): + """ + Gets the number of individual bands for this mode. + + :param mode: Input mode. + :returns: The number of bands in this mode. 
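+        For example, 3 for "RGB", 4 for "CMYK", and 1 for "L".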
+ :exception KeyError: If the input mode was not a standard mode. + """ + return len(ImageMode.getmode(mode).bands) + + +# -------------------------------------------------------------------- +# Helpers + +_initialized = 0 + + +def preinit(): + """Explicitly load standard file format drivers.""" + + global _initialized + if _initialized >= 1: + return + + try: + from . import BmpImagePlugin + + assert BmpImagePlugin + except ImportError: + pass + try: + from . import GifImagePlugin + + assert GifImagePlugin + except ImportError: + pass + try: + from . import JpegImagePlugin + + assert JpegImagePlugin + except ImportError: + pass + try: + from . import PpmImagePlugin + + assert PpmImagePlugin + except ImportError: + pass + try: + from . import PngImagePlugin + + assert PngImagePlugin + except ImportError: + pass + # try: + # import TiffImagePlugin + # assert TiffImagePlugin + # except ImportError: + # pass + + _initialized = 1 + + +def init(): + """ + Explicitly initializes the Python Imaging Library. This function + loads all available file format drivers. + """ + + global _initialized + if _initialized >= 2: + return 0 + + for plugin in _plugins: + try: + logger.debug("Importing %s", plugin) + __import__(f"PIL.{plugin}", globals(), locals(), []) + except ImportError as e: + logger.debug("Image: failed to import %s: %s", plugin, e) + + if OPEN or SAVE: + _initialized = 2 + return 1 + + +# -------------------------------------------------------------------- +# Codec factories (used by tobytes/frombytes and ImageFile.load) + + +def _getdecoder(mode, decoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + decoder = DECODERS[decoder_name] + except KeyError: + pass + else: + return decoder(mode, *args + extra) + + try: + # get decoder + decoder = getattr(core, decoder_name + "_decoder") + except AttributeError as e: + raise OSError(f"decoder {decoder_name} not available") from e + return decoder(mode, *args + extra) + + +def _getencoder(mode, encoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + encoder = ENCODERS[encoder_name] + except KeyError: + pass + else: + return encoder(mode, *args + extra) + + try: + # get encoder + encoder = getattr(core, encoder_name + "_encoder") + except AttributeError as e: + raise OSError(f"encoder {encoder_name} not available") from e + return encoder(mode, *args + extra) + + +# -------------------------------------------------------------------- +# Simple expression analyzer + + +def coerce_e(value): + deprecate("coerce_e", 10) + return value if isinstance(value, _E) else _E(1, value) + + +# _E(scale, offset) represents the affine transformation scale * x + offset. +# The "data" field is named for compatibility with the old implementation, +# and should be renamed once coerce_e is removed. 
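+# For example, _getscaleoffset(lambda x: x * 2 + 10) evaluates the lambda on
+# _E(1, 0); the operator overloads below fold it into _E(2, 10), so the
+# result is (scale, offset) == (2, 10).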
+class _E: + def __init__(self, scale, data): + self.scale = scale + self.data = data + + def __neg__(self): + return _E(-self.scale, -self.data) + + def __add__(self, other): + if isinstance(other, _E): + return _E(self.scale + other.scale, self.data + other.data) + return _E(self.scale, self.data + other) + + __radd__ = __add__ + + def __sub__(self, other): + return self + -other + + def __rsub__(self, other): + return other + -self + + def __mul__(self, other): + if isinstance(other, _E): + return NotImplemented + return _E(self.scale * other, self.data * other) + + __rmul__ = __mul__ + + def __truediv__(self, other): + if isinstance(other, _E): + return NotImplemented + return _E(self.scale / other, self.data / other) + + +def _getscaleoffset(expr): + a = expr(_E(1, 0)) + return (a.scale, a.data) if isinstance(a, _E) else (0, a) + + +# -------------------------------------------------------------------- +# Implementation wrapper + + +class Image: + """ + This class represents an image object. To create + :py:class:`~PIL.Image.Image` objects, use the appropriate factory + functions. There's hardly ever any reason to call the Image constructor + directly. + + * :py:func:`~PIL.Image.open` + * :py:func:`~PIL.Image.new` + * :py:func:`~PIL.Image.frombytes` + """ + + format = None + format_description = None + _close_exclusive_fp_after_loading = True + + def __init__(self): + # FIXME: take "new" parameters / other image? + # FIXME: turn mode and size into delegating properties? + self.im = None + self.mode = "" + self._size = (0, 0) + self.palette = None + self.info = {} + self._category = 0 + self.readonly = 0 + self.pyaccess = None + self._exif = None + + def __getattr__(self, name): + if name == "category": + deprecate("Image categories", 10, "is_animated", plural=True) + return self._category + raise AttributeError(name) + + @property + def width(self): + return self.size[0] + + @property + def height(self): + return self.size[1] + + @property + def size(self): + return self._size + + def _new(self, im): + new = Image() + new.im = im + new.mode = im.mode + new._size = im.size + if im.mode in ("P", "PA"): + if self.palette: + new.palette = self.palette.copy() + else: + from . import ImagePalette + + new.palette = ImagePalette.ImagePalette() + new.info = self.info.copy() + return new + + # Context manager support + def __enter__(self): + return self + + def __exit__(self, *args): + if hasattr(self, "fp") and getattr(self, "_exclusive_fp", False): + if getattr(self, "_fp", False): + if self._fp != self.fp: + self._fp.close() + self._fp = DeferredError(ValueError("Operation on closed image")) + if self.fp: + self.fp.close() + self.fp = None + + def close(self): + """ + Closes the file pointer, if possible. + + This operation will destroy the image core and release its memory. + The image data will be unusable afterward. + + This function is required to close images that have multiple frames or + have not had their file read and closed by the + :py:meth:`~PIL.Image.Image.load` method. See :ref:`file-handling` for + more information. 
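+
+        A minimal sketch (filename hypothetical)::
+
+            im = Image.open("photo.png")
+            im.close()  # file pointer released; the image is now unusable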
+ """ + try: + if getattr(self, "_fp", False): + if self._fp != self.fp: + self._fp.close() + self._fp = DeferredError(ValueError("Operation on closed image")) + if self.fp: + self.fp.close() + self.fp = None + except Exception as msg: + logger.debug("Error closing: %s", msg) + + if getattr(self, "map", None): + self.map = None + + # Instead of simply setting to None, we're setting up a + # deferred error that will better explain that the core image + # object is gone. + self.im = DeferredError(ValueError("Operation on closed image")) + + def _copy(self): + self.load() + self.im = self.im.copy() + self.pyaccess = None + self.readonly = 0 + + def _ensure_mutable(self): + if self.readonly: + self._copy() + else: + self.load() + + def _dump(self, file=None, format=None, **options): + suffix = "" + if format: + suffix = "." + format + + if not file: + f, filename = tempfile.mkstemp(suffix) + os.close(f) + else: + filename = file + if not filename.endswith(suffix): + filename = filename + suffix + + self.load() + + if not format or format == "PPM": + self.im.save_ppm(filename) + else: + self.save(filename, format, **options) + + return filename + + def __eq__(self, other): + return ( + self.__class__ is other.__class__ + and self.mode == other.mode + and self.size == other.size + and self.info == other.info + and self._category == other._category + and self.getpalette() == other.getpalette() + and self.tobytes() == other.tobytes() + ) + + def __repr__(self): + return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( + self.__class__.__module__, + self.__class__.__name__, + self.mode, + self.size[0], + self.size[1], + id(self), + ) + + def _repr_pretty_(self, p, cycle): + """IPython plain text display support""" + + # Same as __repr__ but without unpredictable id(self), + # to keep Jupyter notebook `text/plain` output stable. + p.text( + "<%s.%s image mode=%s size=%dx%d>" + % ( + self.__class__.__module__, + self.__class__.__name__, + self.mode, + self.size[0], + self.size[1], + ) + ) + + def _repr_png_(self): + """iPython display hook support + + :returns: png version of the image as bytes + """ + b = io.BytesIO() + try: + self.save(b, "PNG") + except Exception as e: + raise ValueError("Could not save to PNG for display") from e + return b.getvalue() + + @property + def __array_interface__(self): + # numpy array interface support + new = {} + shape, typestr = _conv_type_shape(self) + new["shape"] = shape + new["typestr"] = typestr + new["version"] = 3 + try: + if self.mode == "1": + # Binary images need to be extended from bits to bytes + # See: https://github.com/python-pillow/Pillow/issues/350 + new["data"] = self.tobytes("raw", "L") + else: + new["data"] = self.tobytes() + except Exception as e: + if not isinstance(e, (MemoryError, RecursionError)): + try: + import numpy + from packaging.version import parse as parse_version + except ImportError: + pass + else: + if parse_version(numpy.__version__) < parse_version("1.23"): + warnings.warn(e) + raise + return new + + def __getstate__(self): + return [self.info, self.mode, self.size, self.getpalette(), self.tobytes()] + + def __setstate__(self, state): + Image.__init__(self) + self.tile = [] + info, mode, size, palette, data = state + self.info = info + self.mode = mode + self._size = size + self.im = core.new(mode, size) + if mode in ("L", "LA", "P", "PA") and palette: + self.putpalette(palette) + self.frombytes(data) + + def tobytes(self, encoder_name="raw", *args): + """ + Return image as a bytes object. + + .. 
warning:: + + This method returns the raw image data from the internal + storage. For compressed image data (e.g. PNG, JPEG) use + :meth:`~.save`, with a BytesIO parameter for in-memory + data. + + :param encoder_name: What encoder to use. The default is to + use the standard "raw" encoder. + + A list of C encoders can be seen under + codecs section of the function array in + :file:`_imaging.c`. Python encoders are + registered within the relevant plugins. + :param args: Extra arguments to the encoder. + :returns: A :py:class:`bytes` object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if encoder_name == "raw" and args == (): + args = self.mode + + self.load() + + if self.width == 0 or self.height == 0: + return b"" + + # unpack data + e = _getencoder(self.mode, encoder_name, args) + e.setimage(self.im) + + bufsize = max(65536, self.size[0] * 4) # see RawEncode.c + + data = [] + while True: + l, s, d = e.encode(bufsize) + data.append(d) + if s: + break + if s < 0: + raise RuntimeError(f"encoder error {s} in tobytes") + + return b"".join(data) + + def tobitmap(self, name="image"): + """ + Returns the image converted to an X11 bitmap. + + .. note:: This method only works for mode "1" images. + + :param name: The name prefix to use for the bitmap variables. + :returns: A string containing an X11 bitmap. + :raises ValueError: If the mode is not "1" + """ + + self.load() + if self.mode != "1": + raise ValueError("not a bitmap") + data = self.tobytes("xbm") + return b"".join( + [ + f"#define {name}_width {self.size[0]}\n".encode("ascii"), + f"#define {name}_height {self.size[1]}\n".encode("ascii"), + f"static char {name}_bits[] = {{\n".encode("ascii"), + data, + b"};", + ] + ) + + def frombytes(self, data, decoder_name="raw", *args): + """ + Loads this image with pixel data from a bytes object. + + This method is similar to the :py:func:`~PIL.Image.frombytes` function, + but loads data into this image instead of creating a new image object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + # default format + if decoder_name == "raw" and args == (): + args = self.mode + + # unpack data + d = _getdecoder(self.mode, decoder_name, args) + d.setimage(self.im) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") + + def load(self): + """ + Allocates storage for the image and loads the pixel data. In + normal cases, you don't need to call this method, since the + Image class automatically loads an opened image when it is + accessed for the first time. + + If the file associated with the image was opened by Pillow, then this + method will close it. The exception to this is if the image has + multiple frames, in which case the file will be left open for seek + operations. See :ref:`file-handling` for more information. + + :returns: An image access object. 
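+            After ``px = im.load()``, individual pixels can be read and
+            written as ``px[x, y]``.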
+ :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` + """ + if self.im is not None and self.palette and self.palette.dirty: + # realize palette + mode, arr = self.palette.getdata() + self.im.putpalette(mode, arr) + self.palette.dirty = 0 + self.palette.rawmode = None + if "transparency" in self.info and mode in ("LA", "PA"): + if isinstance(self.info["transparency"], int): + self.im.putpalettealpha(self.info["transparency"], 0) + else: + self.im.putpalettealphas(self.info["transparency"]) + self.palette.mode = "RGBA" + else: + palette_mode = "RGBA" if mode.startswith("RGBA") else "RGB" + self.palette.mode = palette_mode + self.palette.palette = self.im.getpalette(palette_mode, palette_mode) + + if self.im is not None: + if cffi and USE_CFFI_ACCESS: + if self.pyaccess: + return self.pyaccess + from . import PyAccess + + self.pyaccess = PyAccess.new(self, self.readonly) + if self.pyaccess: + return self.pyaccess + return self.im.pixel_access(self.readonly) + + def verify(self): + """ + Verifies the contents of a file. For data read from a file, this + method attempts to determine if the file is broken, without + actually decoding the image data. If this method finds any + problems, it raises suitable exceptions. If you need to load + the image after using this method, you must reopen the image + file. + """ + pass + + def convert( + self, mode=None, matrix=None, dither=None, palette=Palette.WEB, colors=256 + ): + """ + Returns a converted copy of this image. For the "P" mode, this + method translates pixels through the palette. If mode is + omitted, a mode is chosen so that all information in the image + and the palette can be represented without a palette. + + The current version supports all possible conversions between + "L", "RGB" and "CMYK". The ``matrix`` argument only supports "L" + and "RGB". + + When translating a color image to greyscale (mode "L"), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + + The default method of converting a greyscale ("L") or "RGB" + image into a bilevel (mode "1") image uses Floyd-Steinberg + dither to approximate the original image luminosity levels. If + dither is ``None``, all values larger than 127 are set to 255 (white), + all other values to 0 (black). To use other thresholds, use the + :py:meth:`~PIL.Image.Image.point` method. + + When converting from "RGBA" to "P" without a ``matrix`` argument, + this passes the operation to :py:meth:`~PIL.Image.Image.quantize`, + and ``dither`` and ``palette`` are ignored. + + When converting from "PA", if an "RGBA" palette is present, the alpha + channel from the image will be used instead of the values from the palette. + + :param mode: The requested mode. See: :ref:`concept-modes`. + :param matrix: An optional conversion matrix. If given, this + should be 4- or 12-tuple containing floating point values. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG` + (default). Note that this is not used when ``matrix`` is supplied. + :param palette: Palette to use when converting from mode "RGB" + to "P". Available palettes are :data:`Palette.WEB` or + :data:`Palette.ADAPTIVE`. + :param colors: Number of colors to use for the :data:`Palette.ADAPTIVE` + palette. Defaults to 256. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. 
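+
+        A minimal sketch::
+
+            grey = im.convert("L")                      # ITU-R 601-2 luma
+            bw = grey.convert("1", dither=Dither.NONE)  # threshold at 127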
+ """ + + self.load() + + has_transparency = self.info.get("transparency") is not None + if not mode and self.mode == "P": + # determine default mode + if self.palette: + mode = self.palette.mode + else: + mode = "RGB" + if mode == "RGB" and has_transparency: + mode = "RGBA" + if not mode or (mode == self.mode and not matrix): + return self.copy() + + if matrix: + # matrix conversion + if mode not in ("L", "RGB"): + raise ValueError("illegal conversion") + im = self.im.convert_matrix(mode, matrix) + new = self._new(im) + if has_transparency and self.im.bands == 3: + transparency = new.info["transparency"] + + def convert_transparency(m, v): + v = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3] * 0.5 + return max(0, min(255, int(v))) + + if mode == "L": + transparency = convert_transparency(matrix, transparency) + elif len(mode) == 3: + transparency = tuple( + convert_transparency(matrix[i * 4 : i * 4 + 4], transparency) + for i in range(0, len(transparency)) + ) + new.info["transparency"] = transparency + return new + + if mode == "P" and self.mode == "RGBA": + return self.quantize(colors) + + trns = None + delete_trns = False + # transparency handling + if has_transparency: + if (self.mode in ("1", "L", "I") and mode in ("LA", "RGBA")) or ( + self.mode == "RGB" and mode == "RGBA" + ): + # Use transparent conversion to promote from transparent + # color to an alpha channel. + new_im = self._new( + self.im.convert_transparent(mode, self.info["transparency"]) + ) + del new_im.info["transparency"] + return new_im + elif self.mode in ("L", "RGB", "P") and mode in ("L", "RGB", "P"): + t = self.info["transparency"] + if isinstance(t, bytes): + # Dragons. This can't be represented by a single color + warnings.warn( + "Palette images with Transparency expressed in bytes should be " + "converted to RGBA images" + ) + delete_trns = True + else: + # get the new transparency color. + # use existing conversions + trns_im = Image()._new(core.new(self.mode, (1, 1))) + if self.mode == "P": + trns_im.putpalette(self.palette) + if isinstance(t, tuple): + err = "Couldn't allocate a palette color for transparency" + try: + t = trns_im.palette.getcolor(t, self) + except ValueError as e: + if str(e) == "cannot allocate more than 256 colors": + # If all 256 colors are in use, + # then there is no need for transparency + t = None + else: + raise ValueError(err) from e + if t is None: + trns = None + else: + trns_im.putpixel((0, 0), t) + + if mode in ("L", "RGB"): + trns_im = trns_im.convert(mode) + else: + # can't just retrieve the palette number, got to do it + # after quantization. + trns_im = trns_im.convert("RGB") + trns = trns_im.getpixel((0, 0)) + + elif self.mode == "P" and mode in ("LA", "PA", "RGBA"): + t = self.info["transparency"] + delete_trns = True + + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + raise ValueError("Transparency for P mode should be bytes or int") + + if mode == "P" and palette == Palette.ADAPTIVE: + im = self.im.quantize(colors) + new = self._new(im) + from . import ImagePalette + + new.palette = ImagePalette.ImagePalette("RGB", new.im.getpalette("RGB")) + if delete_trns: + # This could possibly happen if we requantize to fewer colors. + # The transparency would be totally off in that case. 
+                del new.info["transparency"]
+            if trns is not None:
+                try:
+                    new.info["transparency"] = new.palette.getcolor(trns, new)
+                except Exception:
+                    # if we can't make a transparent color, don't leave the old
+                    # transparency hanging around to mess us up.
+                    del new.info["transparency"]
+                    warnings.warn("Couldn't allocate palette entry for transparency")
+            return new
+
+        if "LAB" in (self.mode, mode):
+            other_mode = mode if self.mode == "LAB" else self.mode
+            if other_mode in ("RGB", "RGBA", "RGBX"):
+                from . import ImageCms
+
+                srgb = ImageCms.createProfile("sRGB")
+                lab = ImageCms.createProfile("LAB")
+                profiles = [lab, srgb] if self.mode == "LAB" else [srgb, lab]
+                transform = ImageCms.buildTransform(
+                    profiles[0], profiles[1], self.mode, mode
+                )
+                return transform.apply(self)
+
+        # colorspace conversion
+        if dither is None:
+            dither = Dither.FLOYDSTEINBERG
+
+        try:
+            im = self.im.convert(mode, dither)
+        except ValueError:
+            try:
+                # normalize source image and try again
+                modebase = getmodebase(self.mode)
+                if modebase == self.mode:
+                    raise
+                im = self.im.convert(modebase)
+                im = im.convert(mode, dither)
+            except KeyError as e:
+                raise ValueError("illegal conversion") from e
+
+        new_im = self._new(im)
+        if mode == "P" and palette != Palette.ADAPTIVE:
+            from . import ImagePalette
+
+            new_im.palette = ImagePalette.ImagePalette("RGB", list(range(256)) * 3)
+        if delete_trns:
+            # fail hard if we would leave a bytes transparency in an RGB/L mode.
+            del new_im.info["transparency"]
+        if trns is not None:
+            if new_im.mode == "P":
+                try:
+                    new_im.info["transparency"] = new_im.palette.getcolor(trns, new_im)
+                except ValueError as e:
+                    del new_im.info["transparency"]
+                    if str(e) != "cannot allocate more than 256 colors":
+                        # If all 256 colors are in use, transparency simply
+                        # cannot be represented, so only warn for other errors.
+                        warnings.warn(
+                            "Couldn't allocate palette entry for transparency"
+                        )
+            else:
+                new_im.info["transparency"] = trns
+        return new_im
+
+    def quantize(
+        self,
+        colors=256,
+        method=None,
+        kmeans=0,
+        palette=None,
+        dither=Dither.FLOYDSTEINBERG,
+    ):
+        """
+        Convert the image to 'P' mode with the specified number
+        of colors.
+
+        :param colors: The desired number of colors, <= 256
+        :param method: :data:`Quantize.MEDIANCUT` (median cut),
+                       :data:`Quantize.MAXCOVERAGE` (maximum coverage),
+                       :data:`Quantize.FASTOCTREE` (fast octree),
+                       :data:`Quantize.LIBIMAGEQUANT` (libimagequant; check support
+                       using :py:func:`PIL.features.check_feature` with
+                       ``feature="libimagequant"``).
+
+                       By default, :data:`Quantize.MEDIANCUT` will be used.
+
+                       The exception to this is RGBA images. :data:`Quantize.MEDIANCUT`
+                       and :data:`Quantize.MAXCOVERAGE` do not support RGBA images, so
+                       :data:`Quantize.FASTOCTREE` is used by default instead.
+        :param kmeans: Integer greater than or equal to zero; when greater
+                       than zero, the resulting palette is refined with k-means.
+        :param palette: Quantize to the palette of the given
+                        :py:class:`PIL.Image.Image`.
+        :param dither: Dithering method, used when converting from
+           mode "RGB" to "P" or from "RGB" or "L" to "1".
+           Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG`
+           (default).
+        :returns: A new image
+
+        """
+
+        self.load()
+
+        if method is None:
+            # defaults:
+            method = Quantize.MEDIANCUT
+            if self.mode == "RGBA":
+                method = Quantize.FASTOCTREE
+
+        if self.mode == "RGBA" and method not in (
+            Quantize.FASTOCTREE,
+            Quantize.LIBIMAGEQUANT,
+        ):
+            # Caller specified a method that cannot handle RGBA data.
+ raise ValueError( + "Fast Octree (method == 2) and libimagequant (method == 3) " + "are the only valid methods for quantizing RGBA images" + ) + + if palette: + # use palette from reference image + palette.load() + if palette.mode != "P": + raise ValueError("bad mode for palette image") + if self.mode != "RGB" and self.mode != "L": + raise ValueError( + "only RGB or L mode images can be quantized to a palette" + ) + im = self.im.convert("P", dither, palette.im) + new_im = self._new(im) + new_im.palette = palette.palette.copy() + return new_im + + im = self._new(self.im.quantize(colors, method, kmeans)) + + from . import ImagePalette + + mode = im.im.getpalettemode() + palette = im.im.getpalette(mode, mode)[: colors * len(mode)] + im.palette = ImagePalette.ImagePalette(mode, palette) + + return im + + def copy(self): + """ + Copies this image. Use this method if you wish to paste things + into an image, but still retain the original. + + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + self.load() + return self._new(self.im.copy()) + + __copy__ = copy + + def crop(self, box=None): + """ + Returns a rectangular region from this image. The box is a + 4-tuple defining the left, upper, right, and lower pixel + coordinate. See :ref:`coordinate-system`. + + Note: Prior to Pillow 3.4.0, this was a lazy operation. + + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if box is None: + return self.copy() + + if box[2] < box[0]: + raise ValueError("Coordinate 'right' is less than 'left'") + elif box[3] < box[1]: + raise ValueError("Coordinate 'lower' is less than 'upper'") + + self.load() + return self._new(self._crop(self.im, box)) + + def _crop(self, im, box): + """ + Returns a rectangular region from the core image object im. + + This is equivalent to calling im.crop((x0, y0, x1, y1)), but + includes additional sanity checks. + + :param im: a core image object + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :returns: A core image object. + """ + + x0, y0, x1, y1 = map(int, map(round, box)) + + absolute_values = (abs(x1 - x0), abs(y1 - y0)) + + _decompression_bomb_check(absolute_values) + + return im.crop((x0, y0, x1, y1)) + + def draft(self, mode, size): + """ + Configures the image file loader so it returns a version of the + image that as closely as possible matches the given mode and + size. For example, you can use this method to convert a color + JPEG to greyscale while loading it. + + If any changes are made, returns a tuple with the chosen ``mode`` and + ``box`` with coordinates of the original image within the altered one. + + Note that this method modifies the :py:class:`~PIL.Image.Image` object + in place. If the image has already been loaded, this method has no + effect. + + Note: This method is not implemented for most images. It is + currently implemented only for JPEG and MPO images. + + :param mode: The requested mode. + :param size: The requested size. + """ + pass + + def _expand(self, xmargin, ymargin=None): + if ymargin is None: + ymargin = xmargin + self.load() + return self._new(self.im.expand(xmargin, ymargin, 0)) + + def filter(self, filter): + """ + Filters this image using the given filter. For a list of + available filters, see the :py:mod:`~PIL.ImageFilter` module. + + :param filter: Filter kernel. + :returns: An :py:class:`~PIL.Image.Image` object.""" + + from . 
import ImageFilter + + self.load() + + if isinstance(filter, Callable): + filter = filter() + if not hasattr(filter, "filter"): + raise TypeError( + "filter argument should be ImageFilter.Filter instance or class" + ) + + multiband = isinstance(filter, ImageFilter.MultibandFilter) + if self.im.bands == 1 or multiband: + return self._new(filter.filter(self.im)) + + ims = [] + for c in range(self.im.bands): + ims.append(self._new(filter.filter(self.im.getband(c)))) + return merge(self.mode, ims) + + def getbands(self): + """ + Returns a tuple containing the name of each band in this image. + For example, ``getbands`` on an RGB image returns ("R", "G", "B"). + + :returns: A tuple containing band names. + :rtype: tuple + """ + return ImageMode.getmode(self.mode).bands + + def getbbox(self): + """ + Calculates the bounding box of the non-zero regions in the + image. + + :returns: The bounding box is returned as a 4-tuple defining the + left, upper, right, and lower pixel coordinate. See + :ref:`coordinate-system`. If the image is completely empty, this + method returns None. + + """ + + self.load() + return self.im.getbbox() + + def getcolors(self, maxcolors=256): + """ + Returns a list of colors used in this image. + + The colors will be in the image's mode. For example, an RGB image will + return a tuple of (red, green, blue) color values, and a P image will + return the index of the color in the palette. + + :param maxcolors: Maximum number of colors. If this number is + exceeded, this method returns None. The default limit is + 256 colors. + :returns: An unsorted list of (count, pixel) values. + """ + + self.load() + if self.mode in ("1", "L", "P"): + h = self.im.histogram() + out = [] + for i in range(256): + if h[i]: + out.append((h[i], i)) + if len(out) > maxcolors: + return None + return out + return self.im.getcolors(maxcolors) + + def getdata(self, band=None): + """ + Returns the contents of this image as a sequence object + containing pixel values. The sequence object is flattened, so + that values for line one follow directly after the values of + line zero, and so on. + + Note that the sequence object returned by this method is an + internal PIL data type, which only supports certain sequence + operations. To convert it to an ordinary sequence (e.g. for + printing), use ``list(im.getdata())``. + + :param band: What band to return. The default is to return + all bands. To return a single band, pass in the index + value (e.g. 0 to get the "R" band from an "RGB" image). + :returns: A sequence-like object. + """ + + self.load() + if band is not None: + return self.im.getband(band) + return self.im # could be abused + + def getextrema(self): + """ + Gets the minimum and maximum pixel values for each band in + the image. + + :returns: For a single-band image, a 2-tuple containing the + minimum and maximum pixel value. For a multi-band image, + a tuple containing one 2-tuple for each band. 
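+
+        For example (``im`` stands for an already-opened image; the exact
+        numbers are illustrative)::
+
+            im.getextrema()               # "RGB": e.g. ((0, 255), (3, 247), (12, 204))
+            im.convert("L").getextrema()  # "L": e.g. (0, 255)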
+ """ + + self.load() + if self.im.bands > 1: + extrema = [] + for i in range(self.im.bands): + extrema.append(self.im.getband(i).getextrema()) + return tuple(extrema) + return self.im.getextrema() + + def _getxmp(self, xmp_tags): + def get_name(tag): + return tag.split("}")[1] + + def get_value(element): + value = {get_name(k): v for k, v in element.attrib.items()} + children = list(element) + if children: + for child in children: + name = get_name(child.tag) + child_value = get_value(child) + if name in value: + if not isinstance(value[name], list): + value[name] = [value[name]] + value[name].append(child_value) + else: + value[name] = child_value + elif value: + if element.text: + value["text"] = element.text + else: + return element.text + return value + + if ElementTree is None: + warnings.warn("XMP data cannot be read without defusedxml dependency") + return {} + else: + root = ElementTree.fromstring(xmp_tags) + return {get_name(root.tag): get_value(root)} + + def getexif(self): + if self._exif is None: + self._exif = Exif() + self._exif._loaded = False + elif self._exif._loaded: + return self._exif + self._exif._loaded = True + + exif_info = self.info.get("exif") + if exif_info is None: + if "Raw profile type exif" in self.info: + exif_info = bytes.fromhex( + "".join(self.info["Raw profile type exif"].split("\n")[3:]) + ) + elif hasattr(self, "tag_v2"): + self._exif.bigtiff = self.tag_v2._bigtiff + self._exif.endian = self.tag_v2._endian + self._exif.load_from_fp(self.fp, self.tag_v2._offset) + if exif_info is not None: + self._exif.load(exif_info) + + # XMP tags + if 0x0112 not in self._exif: + xmp_tags = self.info.get("XML:com.adobe.xmp") + if xmp_tags: + match = re.search(r'tiff:Orientation(="|>)([0-9])', xmp_tags) + if match: + self._exif[0x0112] = int(match[2]) + + return self._exif + + def _reload_exif(self): + if self._exif is None or not self._exif._loaded: + return + self._exif._loaded = False + self.getexif() + + def getim(self): + """ + Returns a capsule that points to the internal image memory. + + :returns: A capsule object. + """ + + self.load() + return self.im.ptr + + def getpalette(self, rawmode="RGB"): + """ + Returns the image palette as a list. + + :param rawmode: The mode in which to return the palette. ``None`` will + return the palette in its current mode. + + .. versionadded:: 9.1.0 + + :returns: A list of color values [r, g, b, ...], or None if the + image has no palette. + """ + + self.load() + try: + mode = self.im.getpalettemode() + except ValueError: + return None # no palette + if rawmode is None: + rawmode = mode + return list(self.im.getpalette(mode, rawmode)) + + def apply_transparency(self): + """ + If a P mode image has a "transparency" key in the info dictionary, + remove the key and apply the transparency to the palette instead. + """ + if self.mode != "P" or "transparency" not in self.info: + return + + from . import ImagePalette + + palette = self.getpalette("RGBA") + transparency = self.info["transparency"] + if isinstance(transparency, bytes): + for i, alpha in enumerate(transparency): + palette[i * 4 + 3] = alpha + else: + palette[transparency * 4 + 3] = 0 + self.palette = ImagePalette.ImagePalette("RGBA", bytes(palette)) + self.palette.dirty = 1 + + del self.info["transparency"] + + def getpixel(self, xy): + """ + Returns the pixel value at a given position. + + :param xy: The coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: The pixel value. If the image is a multi-layer image, + this method returns a tuple. 
+ """ + + self.load() + if self.pyaccess: + return self.pyaccess.getpixel(xy) + return self.im.getpixel(xy) + + def getprojection(self): + """ + Get projection to x and y axes + + :returns: Two sequences, indicating where there are non-zero + pixels along the X-axis and the Y-axis, respectively. + """ + + self.load() + x, y = self.im.getprojection() + return list(x), list(y) + + def histogram(self, mask=None, extrema=None): + """ + Returns a histogram for the image. The histogram is returned as a + list of pixel counts, one for each pixel value in the source + image. Counts are grouped into 256 bins for each band, even if + the image has more than 8 bits per band. If the image has more + than one band, the histograms for all bands are concatenated (for + example, the histogram for an "RGB" image contains 768 values). + + A bilevel image (mode "1") is treated as a greyscale ("L") image + by this method. + + If a mask is provided, the method returns a histogram for those + parts of the image where the mask image is non-zero. The mask + image must have the same size as the image, and be either a + bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :param extrema: An optional tuple of manually-specified extrema. + :returns: A list containing pixel counts. + """ + self.load() + if mask: + mask.load() + return self.im.histogram((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.histogram(extrema) + return self.im.histogram() + + def entropy(self, mask=None, extrema=None): + """ + Calculates and returns the entropy for the image. + + A bilevel image (mode "1") is treated as a greyscale ("L") + image by this method. + + If a mask is provided, the method employs the histogram for + those parts of the image where the mask image is non-zero. + The mask image must have the same size as the image, and be + either a bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :param extrema: An optional tuple of manually-specified extrema. + :returns: A float value representing the image entropy + """ + self.load() + if mask: + mask.load() + return self.im.entropy((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.entropy(extrema) + return self.im.entropy() + + def paste(self, im, box=None, mask=None): + """ + Pastes another image into this image. The box argument is either + a 2-tuple giving the upper left corner, a 4-tuple defining the + left, upper, right, and lower pixel coordinate, or None (same as + (0, 0)). See :ref:`coordinate-system`. If a 4-tuple is given, the size + of the pasted image must match the size of the region. + + If the modes don't match, the pasted image is converted to the mode of + this image (see the :py:meth:`~PIL.Image.Image.convert` method for + details). + + Instead of an image, the source can be a integer or tuple + containing pixel values. The method then fills the region + with the given color. When creating RGB images, you can + also use color strings as supported by the ImageColor module. + + If a mask is given, this method updates only the regions + indicated by the mask. You can use either "1", "L", "LA", "RGBA" + or "RGBa" images (if present, the alpha band is used as mask). + Where the mask is 255, the given image is copied as is. Where + the mask is 0, the current value is preserved. 
Intermediate + values will mix the two images together, including their alpha + channels if they have them. + + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. + + :param im: Source image or pixel value (integer or tuple). + :param box: An optional 4-tuple giving the region to paste into. + If a 2-tuple is used instead, it's treated as the upper left + corner. If omitted or None, the source is pasted into the + upper left corner. + + If an image is given as the second argument and there is no + third, the box defaults to (0, 0), and the second argument + is interpreted as a mask image. + :param mask: An optional mask image. + """ + + if isImageType(box) and mask is None: + # abbreviated paste(im, mask) syntax + mask = box + box = None + + if box is None: + box = (0, 0) + + if len(box) == 2: + # upper left corner given; get size from image or mask + if isImageType(im): + size = im.size + elif isImageType(mask): + size = mask.size + else: + # FIXME: use self.size here? + raise ValueError("cannot determine region size; use 4-item box") + box += (box[0] + size[0], box[1] + size[1]) + + if isinstance(im, str): + from . import ImageColor + + im = ImageColor.getcolor(im, self.mode) + + elif isImageType(im): + im.load() + if self.mode != im.mode: + if self.mode != "RGB" or im.mode not in ("LA", "RGBA", "RGBa"): + # should use an adapter for this! + im = im.convert(self.mode) + im = im.im + + self._ensure_mutable() + + if mask: + mask.load() + self.im.paste(im, box, mask.im) + else: + self.im.paste(im, box) + + def alpha_composite(self, im, dest=(0, 0), source=(0, 0)): + """'In-place' analog of Image.alpha_composite. Composites an image + onto this image. + + :param im: image to composite over this one + :param dest: Optional 2 tuple (left, top) specifying the upper + left corner in this (destination) image. + :param source: Optional 2 (left, top) tuple for the upper left + corner in the overlay source image, or 4 tuple (left, top, right, + bottom) for the bounds of the source rectangle + + Performance Note: Not currently implemented in-place in the core layer. + """ + + if not isinstance(source, (list, tuple)): + raise ValueError("Source must be a tuple") + if not isinstance(dest, (list, tuple)): + raise ValueError("Destination must be a tuple") + if not len(source) in (2, 4): + raise ValueError("Source must be a 2 or 4-tuple") + if not len(dest) == 2: + raise ValueError("Destination must be a 2-tuple") + if min(source) < 0: + raise ValueError("Source must be non-negative") + + if len(source) == 2: + source = source + im.size + + # over image, crop if it's not the whole thing. + if source == (0, 0) + im.size: + overlay = im + else: + overlay = im.crop(source) + + # target for the paste + box = dest + (dest[0] + overlay.width, dest[1] + overlay.height) + + # destination image. don't copy if we're using the whole image. + if box == (0, 0) + self.size: + background = self + else: + background = self.crop(box) + + result = alpha_composite(background, overlay) + self.paste(result, box) + + def point(self, lut, mode=None): + """ + Maps this image through a lookup table or function. + + :param lut: A lookup table, containing 256 (or 65536 if + self.mode=="I" and mode == "L") values per band in the + image. A function can be used instead, it should take a + single argument. The function is called once for each + possible pixel value, and the resulting table is applied to + all bands of the image. 
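+
+           For example, inverting an "L" image with the function form
+           (a sketch; ``im`` is assumed to already be mode "L")::
+
+               inverted = im.point(lambda v: 255 - v)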
+ + It may also be an :py:class:`~PIL.Image.ImagePointHandler` + object:: + + class Example(Image.ImagePointHandler): + def point(self, data): + # Return result + :param mode: Output mode (default is same as input). In the + current version, this can only be used if the source image + has mode "L" or "P", and the output has mode "1" or the + source image mode is "I" and the output mode is "L". + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if isinstance(lut, ImagePointHandler): + return lut.point(self) + + if callable(lut): + # if it isn't a list, it should be a function + if self.mode in ("I", "I;16", "F"): + # check if the function can be used with point_transform + # UNDONE wiredfool -- I think this prevents us from ever doing + # a gamma function point transform on > 8bit images. + scale, offset = _getscaleoffset(lut) + return self._new(self.im.point_transform(scale, offset)) + # for other modes, convert the function to a table + lut = [lut(i) for i in range(256)] * self.im.bands + + if self.mode == "F": + # FIXME: _imaging returns a confusing error message for this case + raise ValueError("point operation not supported for this mode") + + if mode != "F": + lut = [round(i) for i in lut] + return self._new(self.im.point(lut, mode)) + + def putalpha(self, alpha): + """ + Adds or replaces the alpha layer in this image. If the image + does not have an alpha layer, it's converted to "LA" or "RGBA". + The new layer must be either "L" or "1". + + :param alpha: The new alpha layer. This can either be an "L" or "1" + image having the same size as this image, or an integer or + other color value. + """ + + self._ensure_mutable() + + if self.mode not in ("LA", "PA", "RGBA"): + # attempt to promote self to a matching alpha mode + try: + mode = getmodebase(self.mode) + "A" + try: + self.im.setmode(mode) + except (AttributeError, ValueError) as e: + # do things the hard way + im = self.im.convert(mode) + if im.mode not in ("LA", "PA", "RGBA"): + raise ValueError from e # sanity check + self.im = im + self.pyaccess = None + self.mode = self.im.mode + except KeyError as e: + raise ValueError("illegal image mode") from e + + if self.mode in ("LA", "PA"): + band = 1 + else: + band = 3 + + if isImageType(alpha): + # alpha layer + if alpha.mode not in ("1", "L"): + raise ValueError("illegal image mode") + alpha.load() + if alpha.mode == "1": + alpha = alpha.convert("L") + else: + # constant alpha + try: + self.im.fillband(band, alpha) + except (AttributeError, ValueError): + # do things the hard way + alpha = new("L", self.size, alpha) + else: + return + + self.im.putband(alpha.im, band) + + def putdata(self, data, scale=1.0, offset=0.0): + """ + Copies pixel data from a flattened sequence object into the image. The + values should start at the upper left corner (0, 0), continue to the + end of the line, followed directly by the first value of the second + line, and so on. Data will be read until either the image or the + sequence ends. The scale and offset values are used to adjust the + sequence values: **pixel = value*scale + offset**. + + :param data: A flattened sequence object. + :param scale: An optional scale value. The default is 1.0. + :param offset: An optional offset value. The default is 0.0. + """ + + self._ensure_mutable() + + self.im.putdata(data, scale, offset) + + def putpalette(self, data, rawmode="RGB"): + """ + Attaches a palette to this image. The image must be a "P", "PA", "L" + or "LA" image. 
+ + The palette sequence must contain at most 256 colors, made up of one + integer value for each channel in the raw mode. + For example, if the raw mode is "RGB", then it can contain at most 768 + values, made up of red, green and blue values for the corresponding pixel + index in the 256 colors. + If the raw mode is "RGBA", then it can contain at most 1024 values, + containing red, green, blue and alpha values. + + Alternatively, an 8-bit string may be used instead of an integer sequence. + + :param data: A palette sequence (either a list or a string). + :param rawmode: The raw mode of the palette. Either "RGB", "RGBA", or a mode + that can be transformed to "RGB" or "RGBA" (e.g. "R", "BGR;15", "RGBA;L"). + """ + from . import ImagePalette + + if self.mode not in ("L", "LA", "P", "PA"): + raise ValueError("illegal image mode") + if isinstance(data, ImagePalette.ImagePalette): + palette = ImagePalette.raw(data.rawmode, data.palette) + else: + if not isinstance(data, bytes): + data = bytes(data) + palette = ImagePalette.raw(rawmode, data) + self.mode = "PA" if "A" in self.mode else "P" + self.palette = palette + self.palette.mode = "RGB" + self.load() # install new palette + + def putpixel(self, xy, value): + """ + Modifies the pixel at the given position. The color is given as + a single numerical value for single-band images, and a tuple for + multi-band images. In addition to this, RGB and RGBA tuples are + accepted for P and PA images. + + Note that this method is relatively slow. For more extensive changes, + use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` + module instead. + + See: + + * :py:meth:`~PIL.Image.Image.paste` + * :py:meth:`~PIL.Image.Image.putdata` + * :py:mod:`~PIL.ImageDraw` + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param value: The pixel value. + """ + + if self.readonly: + self._copy() + self.load() + + if self.pyaccess: + return self.pyaccess.putpixel(xy, value) + + if ( + self.mode in ("P", "PA") + and isinstance(value, (list, tuple)) + and len(value) in [3, 4] + ): + # RGB or RGBA value for a P or PA image + if self.mode == "PA": + alpha = value[3] if len(value) == 4 else 255 + value = value[:3] + value = self.palette.getcolor(value, self) + if self.mode == "PA": + value = (value, alpha) + return self.im.putpixel(xy, value) + + def remap_palette(self, dest_map, source_palette=None): + """ + Rewrites the image to reorder the palette. + + :param dest_map: A list of indexes into the original palette. + e.g. ``[1,0]`` would swap a two item palette, and ``list(range(256))`` + is the identity transform. + :param source_palette: Bytes or None. + :returns: An :py:class:`~PIL.Image.Image` object. + + """ + from . 
import ImagePalette
+
+        if self.mode not in ("L", "P"):
+            raise ValueError("illegal image mode")
+
+        bands = 3
+        palette_mode = "RGB"
+        if source_palette is None:
+            if self.mode == "P":
+                self.load()
+                palette_mode = self.im.getpalettemode()
+                if palette_mode == "RGBA":
+                    bands = 4
+                source_palette = self.im.getpalette(palette_mode, palette_mode)
+            else:  # L-mode
+                source_palette = bytearray(i // 3 for i in range(768))
+
+        palette_bytes = b""
+        new_positions = [0] * 256
+
+        # pick only the used colors from the palette
+        for i, oldPosition in enumerate(dest_map):
+            palette_bytes += source_palette[
+                oldPosition * bands : oldPosition * bands + bands
+            ]
+            new_positions[oldPosition] = i
+
+        # replace the palette color id of all pixels with the new id
+
+        # Palette images are [0..255], mapped through a 1 or 3
+        # byte/color map. We need to remap the whole image
+        # from palette 1 to palette 2. New_positions is
+        # an array of indexes into palette 1. Palette 2 is
+        # palette 1 with any holes removed.
+
+        # We're going to leverage the convert mechanism to use the
+        # C code to remap the image from palette 1 to palette 2,
+        # by forcing the source image into 'L' mode and adding a
+        # mapping 'L' mode palette, then converting back to 'L'
+        # sans palette thus converting the image bytes, then
+        # assigning the optimized RGB palette.
+
+        # perf reference, 9500x4000 gif, w/~135 colors
+        # 14 sec prepatch, 1 sec postpatch with optimization forced.
+
+        mapping_palette = bytearray(new_positions)
+
+        m_im = self.copy()
+        m_im.mode = "P"
+
+        m_im.palette = ImagePalette.ImagePalette(
+            palette_mode, palette=mapping_palette * bands
+        )
+        # possibly set palette dirty, then
+        # m_im.putpalette(mapping_palette, 'L')  # converts to 'P'
+        # or just force it.
+        # UNDONE -- this is part of the general issue with palettes
+        m_im.im.putpalette(palette_mode + ";L", m_im.palette.tobytes())
+
+        m_im = m_im.convert("L")
+
+        m_im.putpalette(palette_bytes, palette_mode)
+        m_im.palette = ImagePalette.ImagePalette(palette_mode, palette=palette_bytes)
+
+        if "transparency" in self.info:
+            try:
+                m_im.info["transparency"] = dest_map.index(self.info["transparency"])
+            except ValueError:
+                if "transparency" in m_im.info:
+                    del m_im.info["transparency"]
+
+        return m_im
+
+    def _get_safe_box(self, size, resample, box):
+        """Expands the box so it includes adjacent pixels
+        that may be used by resampling with the given resampling filter.
+        """
+        filter_support = _filters_support[resample] - 0.5
+        scale_x = (box[2] - box[0]) / size[0]
+        scale_y = (box[3] - box[1]) / size[1]
+        support_x = filter_support * scale_x
+        support_y = filter_support * scale_y
+
+        return (
+            max(0, int(box[0] - support_x)),
+            max(0, int(box[1] - support_y)),
+            min(self.size[0], math.ceil(box[2] + support_x)),
+            min(self.size[1], math.ceil(box[3] + support_y)),
+        )
+
+    def resize(self, size, resample=None, box=None, reducing_gap=None):
+        """
+        Returns a resized copy of this image.
+
+        :param size: The requested size in pixels, as a 2-tuple:
+           (width, height).
+        :param resample: An optional resampling filter. This can be
+           one of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`,
+           :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`,
+           :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`.
+           If the image has mode "1" or "P", it is always set to
+           :py:data:`Resampling.NEAREST`. If the image mode specifies a number
+           of bits, such as "I;16", then the default filter is
+           :py:data:`Resampling.NEAREST`. Otherwise, the default filter is
+           :py:data:`Resampling.BICUBIC`. See: :ref:`concept-filters`.
+        :param box: An optional 4-tuple of floats providing
+           the source image region to be scaled.
+           The values must be within the (0, 0, width, height) rectangle.
+           If omitted or None, the entire source is used.
+        :param reducing_gap: Apply optimization by resizing the image
+           in two steps. First, reducing the image by integer times
+           using :py:meth:`~PIL.Image.Image.reduce`.
+           Second, resizing using regular resampling. The last step
+           changes size no less than by ``reducing_gap`` times.
+           ``reducing_gap`` may be None (no first step is performed)
+           or should be greater than 1.0. The bigger ``reducing_gap``,
+           the closer the result to the fair resampling.
+           The smaller ``reducing_gap``, the faster resizing.
+           With ``reducing_gap`` greater or equal to 3.0, the result is
+           indistinguishable from fair resampling in most cases.
+           The default value is None (no optimization).
+        :returns: An :py:class:`~PIL.Image.Image` object.
+        """
+
+        if resample is None:
+            type_special = ";" in self.mode
+            resample = Resampling.NEAREST if type_special else Resampling.BICUBIC
+        elif resample not in (
+            Resampling.NEAREST,
+            Resampling.BILINEAR,
+            Resampling.BICUBIC,
+            Resampling.LANCZOS,
+            Resampling.BOX,
+            Resampling.HAMMING,
+        ):
+            message = f"Unknown resampling filter ({resample})."
+
+            filters = [
+                f"{filter[1]} ({filter[0]})"
+                for filter in (
+                    (Resampling.NEAREST, "Image.Resampling.NEAREST"),
+                    (Resampling.LANCZOS, "Image.Resampling.LANCZOS"),
+                    (Resampling.BILINEAR, "Image.Resampling.BILINEAR"),
+                    (Resampling.BICUBIC, "Image.Resampling.BICUBIC"),
+                    (Resampling.BOX, "Image.Resampling.BOX"),
+                    (Resampling.HAMMING, "Image.Resampling.HAMMING"),
+                )
+            ]
+            raise ValueError(
+                message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1]
+            )
+
+        if reducing_gap is not None and reducing_gap < 1.0:
+            raise ValueError("reducing_gap must be 1.0 or greater")
+
+        size = tuple(size)
+
+        self.load()
+        if box is None:
+            box = (0, 0) + self.size
+        else:
+            box = tuple(box)
+
+        if self.size == size and box == (0, 0) + self.size:
+            return self.copy()
+
+        if self.mode in ("1", "P"):
+            resample = Resampling.NEAREST
+
+        if self.mode in ["LA", "RGBA"] and resample != Resampling.NEAREST:
+            im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode])
+            im = im.resize(size, resample, box)
+            return im.convert(self.mode)
+
+        self.load()
+
+        if reducing_gap is not None and resample != Resampling.NEAREST:
+            factor_x = int((box[2] - box[0]) / size[0] / reducing_gap) or 1
+            factor_y = int((box[3] - box[1]) / size[1] / reducing_gap) or 1
+            if factor_x > 1 or factor_y > 1:
+                reduce_box = self._get_safe_box(size, resample, box)
+                factor = (factor_x, factor_y)
+                if callable(self.reduce):
+                    self = self.reduce(factor, box=reduce_box)
+                else:
+                    self = Image.reduce(self, factor, box=reduce_box)
+                box = (
+                    (box[0] - reduce_box[0]) / factor_x,
+                    (box[1] - reduce_box[1]) / factor_y,
+                    (box[2] - reduce_box[0]) / factor_x,
+                    (box[3] - reduce_box[1]) / factor_y,
+                )
+
+        return self._new(self.im.resize(size, resample, box))
+
+    def reduce(self, factor, box=None):
+        """
+        Returns a copy of the image reduced ``factor`` times.
+        If the size of the image is not divisible by ``factor``,
+        the resulting size will be rounded up.
+
+        :param factor: An integer greater than 0, or a tuple of two such
+           integers for width and height separately.
+        :param box: An optional 4-tuple of ints providing
+           the source image region to be reduced.
+           The values must be within the ``(0, 0, width, height)`` rectangle.
+           If omitted or ``None``, the entire source is used.
+        """
+        if not isinstance(factor, (list, tuple)):
+            factor = (factor, factor)
+
+        if box is None:
+            box = (0, 0) + self.size
+        else:
+            box = tuple(box)
+
+        if factor == (1, 1) and box == (0, 0) + self.size:
+            return self.copy()
+
+        if self.mode in ["LA", "RGBA"]:
+            im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode])
+            im = im.reduce(factor, box)
+            return im.convert(self.mode)
+
+        self.load()
+
+        return self._new(self.im.reduce(factor, box))
+
+    def rotate(
+        self,
+        angle,
+        resample=Resampling.NEAREST,
+        expand=0,
+        center=None,
+        translate=None,
+        fillcolor=None,
+    ):
+        """
+        Returns a rotated copy of this image. This method returns a
+        copy of this image, rotated the given number of degrees
+        counterclockwise around its center.
+
+        :param angle: In degrees counterclockwise.
+        :param resample: An optional resampling filter. This can be
+           one of :py:data:`Resampling.NEAREST` (use nearest neighbour),
+           :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2
+           environment), or :py:data:`Resampling.BICUBIC` (cubic spline
+           interpolation in a 4x4 environment). If omitted, or if the image has
+           mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`.
+           See :ref:`concept-filters`.
+        :param expand: Optional expansion flag. If true, expands the output
+           image to make it large enough to hold the entire rotated image.
+           If false or omitted, make the output image the same size as the
+           input image. Note that the expand flag assumes rotation around
+           the center and no translation.
+        :param center: Optional center of rotation (a 2-tuple). Origin is
+           the upper left corner. Default is the center of the image.
+        :param translate: An optional post-rotate translation (a 2-tuple).
+        :param fillcolor: An optional color for area outside the rotated image.
+        :returns: An :py:class:`~PIL.Image.Image` object.
+        """
+
+        angle = angle % 360.0
+
+        # Fast paths regardless of filter, as long as we're not
+        # translating or changing the center.
+        if not (center or translate):
+            if angle == 0:
+                return self.copy()
+            if angle == 180:
+                return self.transpose(Transpose.ROTATE_180)
+            if angle in (90, 270) and (expand or self.width == self.height):
+                return self.transpose(
+                    Transpose.ROTATE_90 if angle == 90 else Transpose.ROTATE_270
+                )
+
+        # Calculate the affine matrix. Note that this is the reverse
+        # transformation (from destination image to source) because we
+        # want to interpolate the (discrete) destination pixel from
+        # the local area around the (floating) source pixel.
+
+        # The matrix we actually want (note that it operates from the right):
+        # (1, 0, tx)   (1, 0, cx)   ( cos a, sin a, 0)   (1, 0, -cx)
+        # (0, 1, ty) * (0, 1, cy) * (-sin a, cos a, 0) * (0, 1, -cy)
+        # (0, 0,  1)   (0, 0,  1)   (     0,     0, 1)   (0, 0,   1)
+
+        # The reverse matrix is thus:
+        # (1, 0, cx)   ( cos -a, sin -a, 0)   (1, 0, -cx)   (1, 0, -tx)
+        # (0, 1, cy) * (-sin -a, cos -a, 0) * (0, 1, -cy) * (0, 1, -ty)
+        # (0, 0,  1)   (      0,      0, 1)   (0, 0,   1)   (0, 0,   1)
+
+        # In any case, the final translation may be updated at the end to
+        # compensate for the expand flag.
+
+        w, h = self.size
+
+        if translate is None:
+            post_trans = (0, 0)
+        else:
+            post_trans = translate
+        if center is None:
+            # FIXME These should be rounded to ints?
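+            # default: rotate about the exact (possibly fractional) image center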
+ rotn_center = (w / 2.0, h / 2.0) + else: + rotn_center = center + + angle = -math.radians(angle) + matrix = [ + round(math.cos(angle), 15), + round(math.sin(angle), 15), + 0.0, + round(-math.sin(angle), 15), + round(math.cos(angle), 15), + 0.0, + ] + + def transform(x, y, matrix): + (a, b, c, d, e, f) = matrix + return a * x + b * y + c, d * x + e * y + f + + matrix[2], matrix[5] = transform( + -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix + ) + matrix[2] += rotn_center[0] + matrix[5] += rotn_center[1] + + if expand: + # calculate output size + xx = [] + yy = [] + for x, y in ((0, 0), (w, 0), (w, h), (0, h)): + x, y = transform(x, y, matrix) + xx.append(x) + yy.append(y) + nw = math.ceil(max(xx)) - math.floor(min(xx)) + nh = math.ceil(max(yy)) - math.floor(min(yy)) + + # We multiply a translation matrix from the right. Because of its + # special form, this is the same as taking the image of the + # translation vector as new translation vector. + matrix[2], matrix[5] = transform(-(nw - w) / 2.0, -(nh - h) / 2.0, matrix) + w, h = nw, nh + + return self.transform( + (w, h), Transform.AFFINE, matrix, resample, fillcolor=fillcolor + ) + + def save(self, fp, format=None, **params): + """ + Saves this image under the given filename. If no format is + specified, the format to use is determined from the filename + extension, if possible. + + Keyword options can be used to provide additional instructions + to the writer. If a writer doesn't recognise an option, it is + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. + + You can use a file object instead of a filename. In this case, + you must always specify the format. The file object must + implement the ``seek``, ``tell``, and ``write`` + methods, and be opened in binary mode. + + :param fp: A filename (string), pathlib.Path object or file object. + :param format: Optional format override. If omitted, the + format to use is determined from the filename extension. + If a file object was used instead of a filename, this + parameter should always be used. + :param params: Extra parameters to the image writer. + :returns: None + :exception ValueError: If the output format could not be determined + from the file name. Use the format option to solve this. + :exception OSError: If the file could not be written. The file + may have been created, and may contain partial data. + """ + + filename = "" + open_fp = False + if isinstance(fp, Path): + filename = str(fp) + open_fp = True + elif is_path(fp): + filename = fp + open_fp = True + elif fp == sys.stdout: + try: + fp = sys.stdout.buffer + except AttributeError: + pass + if not filename and hasattr(fp, "name") and is_path(fp.name): + # only set the name for metadata purposes + filename = fp.name + + # may mutate self! 
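+        # (_ensure_mutable() copies the core image first if it is read-only)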
+ self._ensure_mutable() + + save_all = params.pop("save_all", False) + self.encoderinfo = params + self.encoderconfig = () + + preinit() + + ext = os.path.splitext(filename)[1].lower() + + if not format: + if ext not in EXTENSION: + init() + try: + format = EXTENSION[ext] + except KeyError as e: + raise ValueError(f"unknown file extension: {ext}") from e + + if format.upper() not in SAVE: + init() + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] + + created = False + if open_fp: + created = not os.path.exists(filename) + if params.get("append", False): + # Open also for reading ("+"), because TIFF save_all + # writer needs to go back and edit the written data. + fp = builtins.open(filename, "r+b") + else: + fp = builtins.open(filename, "w+b") + + try: + save_handler(self, fp, filename) + except Exception: + if open_fp: + fp.close() + if created: + try: + os.remove(filename) + except PermissionError: + pass + raise + if open_fp: + fp.close() + + def seek(self, frame): + """ + Seeks to the given frame in this sequence file. If you seek + beyond the end of the sequence, the method raises an + ``EOFError`` exception. When a sequence file is opened, the + library automatically seeks to frame 0. + + See :py:meth:`~PIL.Image.Image.tell`. + + If defined, :attr:`~PIL.Image.Image.n_frames` refers to the + number of available frames. + + :param frame: Frame number, starting at 0. + :exception EOFError: If the call attempts to seek beyond the end + of the sequence. + """ + + # overridden by file handlers + if frame != 0: + raise EOFError + + def show(self, title=None): + """ + Displays this image. This method is mainly intended for debugging purposes. + + This method calls :py:func:`PIL.ImageShow.show` internally. You can use + :py:func:`PIL.ImageShow.register` to override its default behaviour. + + The image is first saved to a temporary file. By default, it will be in + PNG format. + + On Unix, the image is then opened using the **display**, **eog** or + **xv** utility, depending on which one can be found. + + On macOS, the image is opened with the native Preview application. + + On Windows, the image is opened with the standard PNG display utility. + + :param title: Optional title to use for the image window, where possible. + """ + + _show(self, title=title) + + def split(self): + """ + Split this image into individual bands. This method returns a + tuple of individual image bands from an image. For example, + splitting an "RGB" image creates three new images each + containing a copy of one of the original bands (red, green, + blue). + + If you need only one band, :py:meth:`~PIL.Image.Image.getchannel` + method can be more convenient and faster. + + :returns: A tuple containing bands. + """ + + self.load() + if self.im.bands == 1: + ims = [self.copy()] + else: + ims = map(self._new, self.im.split()) + return tuple(ims) + + def getchannel(self, channel): + """ + Returns an image containing a single channel of the source image. + + :param channel: What channel to return. Could be index + (0 for "R" channel of "RGB") or channel name + ("A" for alpha channel of "RGBA"). + :returns: An image in "L" mode. + + .. versionadded:: 4.3.0 + """ + self.load() + + if isinstance(channel, str): + try: + channel = self.getbands().index(channel) + except ValueError as e: + raise ValueError(f'The image has no channel "{channel}"') from e + + return self._new(self.im.getband(channel)) + + def tell(self): + """ + Returns the current frame number. 
See :py:meth:`~PIL.Image.Image.seek`. + + If defined, :attr:`~PIL.Image.Image.n_frames` refers to the + number of available frames. + + :returns: Frame number, starting with 0. + """ + return 0 + + def thumbnail(self, size, resample=Resampling.BICUBIC, reducing_gap=2.0): + """ + Make this image into a thumbnail. This method modifies the + image to contain a thumbnail version of itself, no larger than + the given size. This method calculates an appropriate thumbnail + size to preserve the aspect of the image, calls the + :py:meth:`~PIL.Image.Image.draft` method to configure the file reader + (where applicable), and finally resizes the image. + + Note that this function modifies the :py:class:`~PIL.Image.Image` + object in place. If you need to use the full resolution image as well, + apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original + image. + + :param size: Requested size. + :param resample: Optional resampling filter. This can be one + of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`, + :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`, + :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`. + If omitted, it defaults to :py:data:`Resampling.BICUBIC`. + (was :py:data:`Resampling.NEAREST` prior to version 2.5.0). + See: :ref:`concept-filters`. + :param reducing_gap: Apply optimization by resizing the image + in two steps. First, reducing the image by integer times + using :py:meth:`~PIL.Image.Image.reduce` or + :py:meth:`~PIL.Image.Image.draft` for JPEG images. + Second, resizing using regular resampling. The last step + changes size no less than by ``reducing_gap`` times. + ``reducing_gap`` may be None (no first step is performed) + or should be greater than 1.0. The bigger ``reducing_gap``, + the closer the result to the fair resampling. + The smaller ``reducing_gap``, the faster resizing. + With ``reducing_gap`` greater or equal to 3.0, the result is + indistinguishable from fair resampling in most cases. + The default value is 2.0 (very close to fair resampling + while still being faster in many cases). + :returns: None + """ + + provided_size = tuple(map(math.floor, size)) + + def preserve_aspect_ratio(): + def round_aspect(number, key): + return max(min(math.floor(number), math.ceil(number), key=key), 1) + + x, y = provided_size + if x >= self.width and y >= self.height: + return + + aspect = self.width / self.height + if x / y >= aspect: + x = round_aspect(y * aspect, key=lambda n: abs(aspect - n / y)) + else: + y = round_aspect( + x / aspect, key=lambda n: 0 if n == 0 else abs(aspect - x / n) + ) + return x, y + + box = None + if reducing_gap is not None: + size = preserve_aspect_ratio() + if size is None: + return + + res = self.draft(None, (size[0] * reducing_gap, size[1] * reducing_gap)) + if res is not None: + box = res[1] + if box is None: + self.load() + + # load() may have changed the size of the image + size = preserve_aspect_ratio() + if size is None: + return + + if self.size != size: + im = self.resize(size, resample, box=box, reducing_gap=reducing_gap) + + self.im = im.im + self._size = size + self.mode = self.im.mode + + self.readonly = 0 + self.pyaccess = None + + # FIXME: the different transform methods need further explanation + # instead of bloating the method docs, add a separate chapter. + def transform( + self, + size, + method, + data=None, + resample=Resampling.NEAREST, + fill=1, + fillcolor=None, + ): + """ + Transforms this image. 
This method creates a new image with the + given size, and the same mode as the original, and copies data + to the new image using the given transform. + + :param size: The output size. + :param method: The transformation method. This is one of + :py:data:`Transform.EXTENT` (cut out a rectangular subregion), + :py:data:`Transform.AFFINE` (affine transform), + :py:data:`Transform.PERSPECTIVE` (perspective transform), + :py:data:`Transform.QUAD` (map a quadrilateral to a rectangle), or + :py:data:`Transform.MESH` (map a number of source quadrilaterals + in one operation). + + It may also be an :py:class:`~PIL.Image.ImageTransformHandler` + object:: + + class Example(Image.ImageTransformHandler): + def transform(self, size, data, resample, fill=1): + # Return result + + It may also be an object with a ``method.getdata`` method + that returns a tuple supplying new ``method`` and ``data`` values:: + + class Example: + def getdata(self): + method = Image.Transform.EXTENT + data = (0, 0, 100, 100) + return method, data + :param data: Extra data to the transformation method. + :param resample: Optional resampling filter. It can be one of + :py:data:`Resampling.NEAREST` (use nearest neighbour), + :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:data:`Resampling.BICUBIC` (cubic spline + interpolation in a 4x4 environment). If omitted, or if the image + has mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`. + See: :ref:`concept-filters`. + :param fill: If ``method`` is an + :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of + the arguments passed to it. Otherwise, it is unused. + :param fillcolor: Optional fill color for the area outside the + transform in the output image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if self.mode in ("LA", "RGBA") and resample != Resampling.NEAREST: + return ( + self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode]) + .transform(size, method, data, resample, fill, fillcolor) + .convert(self.mode) + ) + + if isinstance(method, ImageTransformHandler): + return method.transform(size, self, resample=resample, fill=fill) + + if hasattr(method, "getdata"): + # compatibility w. old-style transform objects + method, data = method.getdata() + + if data is None: + raise ValueError("missing method data") + + im = new(self.mode, size, fillcolor) + if self.mode == "P" and self.palette: + im.palette = self.palette.copy() + im.info = self.info.copy() + if method == Transform.MESH: + # list of quads + for box, quad in data: + im.__transformer( + box, self, Transform.QUAD, quad, resample, fillcolor is None + ) + else: + im.__transformer( + (0, 0) + size, self, method, data, resample, fillcolor is None + ) + + return im + + def __transformer( + self, box, image, method, data, resample=Resampling.NEAREST, fill=1 + ): + w = box[2] - box[0] + h = box[3] - box[1] + + if method == Transform.AFFINE: + data = data[:6] + + elif method == Transform.EXTENT: + # convert extent to an affine transform + x0, y0, x1, y1 = data + xs = (x1 - x0) / w + ys = (y1 - y0) / h + method = Transform.AFFINE + data = (xs, 0, x0, 0, ys, y0) + + elif method == Transform.PERSPECTIVE: + data = data[:8] + + elif method == Transform.QUAD: + # quadrilateral warp. data specifies the four corners + # given as NW, SW, SE, and NE. 
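+            # The eight coefficients computed below define a bilinear map from
+            # the destination rectangle onto the source quad: As = 1/w and
+            # At = 1/h scale the edge vectors, and the As * At cross terms
+            # handle quads that are not parallelograms.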
+ nw = data[:2] + sw = data[2:4] + se = data[4:6] + ne = data[6:8] + x0, y0 = nw + As = 1.0 / w + At = 1.0 / h + data = ( + x0, + (ne[0] - x0) * As, + (sw[0] - x0) * At, + (se[0] - sw[0] - ne[0] + x0) * As * At, + y0, + (ne[1] - y0) * As, + (sw[1] - y0) * At, + (se[1] - sw[1] - ne[1] + y0) * As * At, + ) + + else: + raise ValueError("unknown transformation method") + + if resample not in ( + Resampling.NEAREST, + Resampling.BILINEAR, + Resampling.BICUBIC, + ): + if resample in (Resampling.BOX, Resampling.HAMMING, Resampling.LANCZOS): + message = { + Resampling.BOX: "Image.Resampling.BOX", + Resampling.HAMMING: "Image.Resampling.HAMMING", + Resampling.LANCZOS: "Image.Resampling.LANCZOS", + }[resample] + f" ({resample}) cannot be used." + else: + message = f"Unknown resampling filter ({resample})." + + filters = [ + f"{filter[1]} ({filter[0]})" + for filter in ( + (Resampling.NEAREST, "Image.Resampling.NEAREST"), + (Resampling.BILINEAR, "Image.Resampling.BILINEAR"), + (Resampling.BICUBIC, "Image.Resampling.BICUBIC"), + ) + ] + raise ValueError( + message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1] + ) + + image.load() + + self.load() + + if image.mode in ("1", "P"): + resample = Resampling.NEAREST + + self.im.transform2(box, image.im, method, data, resample, fill) + + def transpose(self, method): + """ + Transpose image (flip or rotate in 90 degree steps) + + :param method: One of :py:data:`Transpose.FLIP_LEFT_RIGHT`, + :py:data:`Transpose.FLIP_TOP_BOTTOM`, :py:data:`Transpose.ROTATE_90`, + :py:data:`Transpose.ROTATE_180`, :py:data:`Transpose.ROTATE_270`, + :py:data:`Transpose.TRANSPOSE` or :py:data:`Transpose.TRANSVERSE`. + :returns: Returns a flipped or rotated copy of this image. + """ + + self.load() + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance): + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + return self._new(self.im.effect_spread(distance)) + + def toqimage(self): + """Returns a QImage copy of this image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqpixmap(self) + + +# -------------------------------------------------------------------- +# Abstract handlers. 
+ + +class ImagePointHandler: + """ + Used as a mixin by point transforms + (for use with :py:meth:`~PIL.Image.Image.point`) + """ + + pass + + +class ImageTransformHandler: + """ + Used as a mixin by geometry transforms + (for use with :py:meth:`~PIL.Image.Image.transform`) + """ + + pass + + +# -------------------------------------------------------------------- +# Factories + +# +# Debugging + + +def _wedge(): + """Create greyscale wedge (for debugging only)""" + + return Image()._new(core.wedge("L")) + + +def _check_size(size): + """ + Common check to enforce type and sanity check on size tuples + + :param size: Should be a 2 tuple of (width, height) + :returns: True, or raises a ValueError + """ + + if not isinstance(size, (list, tuple)): + raise ValueError("Size must be a tuple") + if len(size) != 2: + raise ValueError("Size must be a tuple of length 2") + if size[0] < 0 or size[1] < 0: + raise ValueError("Width and height must be >= 0") + + return True + + +def new(mode, size, color=0): + """ + Creates a new image with the given mode and size. + + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. + :param size: A 2-tuple, containing (width, height) in pixels. + :param color: What color to use for the image. Default is black. + If given, this should be a single integer or floating point value + for single-band modes, and a tuple for multi-band modes (one value + per band). When creating RGB images, you can also use color + strings as supported by the ImageColor module. If the color is + None, the image is not initialised. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + _check_size(size) + + if color is None: + # don't initialize + return Image()._new(core.new(mode, size)) + + if isinstance(color, str): + # css3-style specifier + + from . import ImageColor + + color = ImageColor.getcolor(color, mode) + + im = Image() + if mode == "P" and isinstance(color, (list, tuple)) and len(color) in [3, 4]: + # RGB or RGBA value for a P image + from . import ImagePalette + + im.palette = ImagePalette.ImagePalette() + color = im.palette.getcolor(color) + return im._new(core.fill(mode, size, color)) + + +def frombytes(mode, size, data, decoder_name="raw", *args): + """ + Creates a copy of an image memory from pixel data in a buffer. + + In its simplest form, this function takes three arguments + (mode, size, and unpacked pixel data). + + You can also use any pixel decoder supported by PIL. For more + information on available decoders, see the section + :ref:`Writing Your Own File Codec `. + + Note that this function decodes pixel data only, not entire images. + If you have an entire image in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load + it. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A byte buffer containing raw data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + _check_size(size) + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw" and args == (): + args = mode + + im = new(mode, size) + im.frombytes(data, decoder_name, args) + return im + + +def frombuffer(mode, size, data, decoder_name="raw", *args): + """ + Creates an image memory referencing pixel data in a byte buffer. 
+
+    This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data
+    in the byte buffer, where possible. This means that changes to the
+    original buffer object are reflected in this image. Not all modes can
+    share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK".
+
+    Note that this function decodes pixel data only, not entire images.
+    If you have an entire image file in a string, wrap it in a
+    :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load it.
+
+    In the current version, the default parameters used for the "raw" decoder
+    differ from those used for :py:func:`~PIL.Image.frombytes`. This is a
+    bug, and will probably be fixed in a future release. The current release
+    issues a warning if you do this; to disable the warning, you should provide
+    the full set of parameters. See below for details.
+
+    :param mode: The image mode. See: :ref:`concept-modes`.
+    :param size: The image size.
+    :param data: A bytes or other buffer object containing raw
+        data for the given mode.
+    :param decoder_name: What decoder to use.
+    :param args: Additional parameters for the given decoder. For the
+        default decoder ("raw"), it's recommended that you provide the
+        full set of parameters::
+
+            frombuffer(mode, size, data, "raw", mode, 0, 1)
+
+    :returns: An :py:class:`~PIL.Image.Image` object.
+
+    .. versionadded:: 1.1.4
+    """
+
+    _check_size(size)
+
+    # may pass tuple instead of argument list
+    if len(args) == 1 and isinstance(args[0], tuple):
+        args = args[0]
+
+    if decoder_name == "raw":
+        if args == ():
+            args = mode, 0, 1
+        if args[0] in _MAPMODES:
+            im = new(mode, (1, 1))
+            im = im._new(core.map_buffer(data, size, decoder_name, 0, args))
+            if mode == "P":
+                from . import ImagePalette
+
+                im.palette = ImagePalette.ImagePalette("RGB", im.im.getpalette("RGB"))
+            im.readonly = 1
+            return im
+
+    return frombytes(mode, size, data, decoder_name, args)
+
+
+def fromarray(obj, mode=None):
+    """
+    Creates an image memory from an object exporting the array interface
+    (using the buffer protocol).
+
+    If ``obj`` is not contiguous, then the ``tobytes`` method is called
+    and :py:func:`~PIL.Image.frombuffer` is used.
+
+    If you have an image in NumPy::
+
+        from PIL import Image
+        import numpy as np
+        im = Image.open("hopper.jpg")
+        a = np.asarray(im)
+
+    Then this can be used to convert it to a Pillow image::
+
+        im = Image.fromarray(a)
+
+    :param obj: Object with array interface
+    :param mode: Optional mode to use when reading ``obj``. Will be determined from
+        type if ``None``.
+
+        This will not be used to convert the data after reading, but will be used to
+        change how the data is read::
+
+            from PIL import Image
+            import numpy as np
+            a = np.full((1, 1), 300)
+            im = Image.fromarray(a, mode="L")
+            im.getpixel((0, 0))  # 44
+            im = Image.fromarray(a, mode="RGB")
+            im.getpixel((0, 0))  # (44, 1, 0)
+
+        See: :ref:`concept-modes` for general information about modes.
+    :returns: An image object.
+
+    ..
versionadded:: 1.1.6 + """ + arr = obj.__array_interface__ + shape = arr["shape"] + ndim = len(shape) + strides = arr.get("strides", None) + if mode is None: + try: + typekey = (1, 1) + shape[2:], arr["typestr"] + except KeyError as e: + raise TypeError("Cannot handle this data type") from e + try: + mode, rawmode = _fromarray_typemap[typekey] + except KeyError as e: + raise TypeError("Cannot handle this data type: %s, %s" % typekey) from e + else: + rawmode = mode + if mode in ["1", "L", "I", "P", "F"]: + ndmax = 2 + elif mode == "RGB": + ndmax = 3 + else: + ndmax = 4 + if ndim > ndmax: + raise ValueError(f"Too many dimensions: {ndim} > {ndmax}.") + + size = 1 if ndim == 1 else shape[1], shape[0] + if strides is not None: + if hasattr(obj, "tobytes"): + obj = obj.tobytes() + else: + obj = obj.tostring() + + return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqpixmap(im) + + +_fromarray_typemap = { + # (shape, typestr) => mode, rawmode + # first two members of shape are set to one + ((1, 1), "|b1"): ("1", "1;8"), + ((1, 1), "|u1"): ("L", "L"), + ((1, 1), "|i1"): ("I", "I;8"), + ((1, 1), "u2"): ("I", "I;16B"), + ((1, 1), "i2"): ("I", "I;16BS"), + ((1, 1), "u4"): ("I", "I;32B"), + ((1, 1), "i4"): ("I", "I;32BS"), + ((1, 1), "f4"): ("F", "F;32BF"), + ((1, 1), "f8"): ("F", "F;64BF"), + ((1, 1, 2), "|u1"): ("LA", "LA"), + ((1, 1, 3), "|u1"): ("RGB", "RGB"), + ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), + # shortcuts: + ((1, 1), _ENDIAN + "i4"): ("I", "I"), + ((1, 1), _ENDIAN + "f4"): ("F", "F"), +} + + +def _decompression_bomb_check(size): + if MAX_IMAGE_PIXELS is None: + return + + pixels = size[0] * size[1] + + if pixels > 2 * MAX_IMAGE_PIXELS: + raise DecompressionBombError( + f"Image size ({pixels} pixels) exceeds limit of {2 * MAX_IMAGE_PIXELS} " + "pixels, could be decompression bomb DOS attack." + ) + + if pixels > MAX_IMAGE_PIXELS: + warnings.warn( + f"Image size ({pixels} pixels) exceeds limit of {MAX_IMAGE_PIXELS} pixels, " + "could be decompression bomb DOS attack.", + DecompressionBombWarning, + ) + + +def open(fp, mode="r", formats=None): + """ + Opens and identifies the given image file. + + This is a lazy operation; this function identifies the file, but + the file remains open and the actual image data is not read from + the file until you try to process the data (or call the + :py:meth:`~PIL.Image.Image.load` method). See + :py:func:`~PIL.Image.new`. See :ref:`file-handling`. + + :param fp: A filename (string), pathlib.Path object or a file object. + The file object must implement ``file.read``, + ``file.seek``, and ``file.tell`` methods, + and be opened in binary mode. + :param mode: The mode. If given, this argument must be "r". + :param formats: A list or tuple of formats to attempt to load the file in. + This can be used to restrict the set of formats checked. + Pass ``None`` to try all supported formats. You can print the set of + available formats by running ``python3 -m PIL`` or using + the :py:func:`PIL.features.pilinfo` function. + :returns: An :py:class:`~PIL.Image.Image` object. 
+ :exception FileNotFoundError: If the file cannot be found. + :exception PIL.UnidentifiedImageError: If the image cannot be opened and + identified. + :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO`` + instance is used for ``fp``. + :exception TypeError: If ``formats`` is not ``None``, a list or a tuple. + """ + + if mode != "r": + raise ValueError(f"bad mode {repr(mode)}") + elif isinstance(fp, io.StringIO): + raise ValueError( + "StringIO cannot be used to open an image. " + "Binary data must be used instead." + ) + + if formats is None: + formats = ID + elif not isinstance(formats, (list, tuple)): + raise TypeError("formats must be a list or tuple") + + exclusive_fp = False + filename = "" + if isinstance(fp, Path): + filename = str(fp.resolve()) + elif is_path(fp): + filename = fp + + if filename: + fp = builtins.open(filename, "rb") + exclusive_fp = True + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) + exclusive_fp = True + + prefix = fp.read(16) + + preinit() + + accept_warnings = [] + + def _open_core(fp, filename, prefix, formats): + for i in formats: + i = i.upper() + if i not in OPEN: + init() + try: + factory, accept = OPEN[i] + result = not accept or accept(prefix) + if type(result) in [str, bytes]: + accept_warnings.append(result) + elif result: + fp.seek(0) + im = factory(fp, filename) + _decompression_bomb_check(im.size) + return im + except (SyntaxError, IndexError, TypeError, struct.error): + # Leave disabled by default, spams the logs with image + # opening failures that are entirely expected. + # logger.debug("", exc_info=True) + continue + except BaseException: + if exclusive_fp: + fp.close() + raise + return None + + im = _open_core(fp, filename, prefix, formats) + + if im is None: + if init(): + im = _open_core(fp, filename, prefix, formats) + + if im: + im._exclusive_fp = exclusive_fp + return im + + if exclusive_fp: + fp.close() + for message in accept_warnings: + warnings.warn(message) + raise UnidentifiedImageError( + "cannot identify image file %r" % (filename if filename else fp) + ) + + +# +# Image processing. + + +def alpha_composite(im1, im2): + """ + Alpha composite im2 over im1. + + :param im1: The first image. Must have mode RGBA. + :param im2: The second image. Must have mode RGBA, and the same size as + the first image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.alpha_composite(im1.im, im2.im)) + + +def blend(im1, im2, alpha): + """ + Creates a new image by interpolating between two input images, using + a constant alpha:: + + out = image1 * (1.0 - alpha) + image2 * alpha + + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :param alpha: The interpolation alpha factor. If alpha is 0.0, a + copy of the first image is returned. If alpha is 1.0, a copy of + the second image is returned. There are no restrictions on the + alpha value. If necessary, the result is clipped to fit into + the allowed output range. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.blend(im1.im, im2.im, alpha)) + + +def composite(image1, image2, mask): + """ + Create composite image by blending images using a transparency mask. + + :param image1: The first image. + :param image2: The second image. Must have the same mode and + size as the first image. + :param mask: A mask image. 
This image can have mode + "1", "L", or "RGBA", and must have the same size as the + other two images. + """ + + image = image2.copy() + image.paste(image1, None, mask) + return image + + +def eval(image, *args): + """ + Applies the function (which should take one argument) to each pixel + in the given image. If the image has more than one band, the same + function is applied to each band. Note that the function is + evaluated once for each possible pixel value, so you cannot use + random components or other generators. + + :param image: The input image. + :param function: A function object, taking one integer argument. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + return image.point(args[0]) + + +def merge(mode, bands): + """ + Merge a set of single band images into a new multiband image. + + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. + :param bands: A sequence containing one single-band image for + each band in the output image. All bands must have the + same size. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if getmodebands(mode) != len(bands) or "*" in mode: + raise ValueError("wrong number of bands") + for band in bands[1:]: + if band.mode != getmodetype(mode): + raise ValueError("mode mismatch") + if band.size != bands[0].size: + raise ValueError("size mismatch") + for band in bands: + band.load() + return bands[0]._new(core.merge(mode, *[b.im for b in bands])) + + +# -------------------------------------------------------------------- +# Plugin registry + + +def register_open(id, factory, accept=None): + """ + Register an image file plugin. This function should not be used + in application code. + + :param id: An image format identifier. + :param factory: An image file factory method. + :param accept: An optional function that can be used to quickly + reject images having another format. + """ + id = id.upper() + ID.append(id) + OPEN[id] = factory, accept + + +def register_mime(id, mimetype): + """ + Registers an image MIME type. This function should not be used + in application code. + + :param id: An image format identifier. + :param mimetype: The image MIME type for this format. + """ + MIME[id.upper()] = mimetype + + +def register_save(id, driver): + """ + Registers an image save function. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE[id.upper()] = driver + + +def register_save_all(id, driver): + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE_ALL[id.upper()] = driver + + +def register_extension(id, extension): + """ + Registers an image extension. This function should not be + used in application code. + + :param id: An image format identifier. + :param extension: An extension used for this format. + """ + EXTENSION[extension.lower()] = id.upper() + + +def register_extensions(id, extensions): + """ + Registers image extensions. This function should not be + used in application code. + + :param id: An image format identifier. + :param extensions: A list of extensions used for this format. 
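+
+    For example, a plugin might register several extensions at once (the
+    extensions listed here are illustrative)::
+
+        register_extensions("JPEG", [".jfif", ".jpe", ".jpg", ".jpeg"])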
+ """ + for extension in extensions: + register_extension(id, extension) + + +def registered_extensions(): + """ + Returns a dictionary containing all file extensions belonging + to registered plugins + """ + if not EXTENSION: + init() + return EXTENSION + + +def register_decoder(name, decoder): + """ + Registers an image decoder. This function should not be + used in application code. + + :param name: The name of the decoder + :param decoder: A callable(mode, args) that returns an + ImageFile.PyDecoder object + + .. versionadded:: 4.1.0 + """ + DECODERS[name] = decoder + + +def register_encoder(name, encoder): + """ + Registers an image encoder. This function should not be + used in application code. + + :param name: The name of the encoder + :param encoder: A callable(mode, args) that returns an + ImageFile.PyEncoder object + + .. versionadded:: 4.1.0 + """ + ENCODERS[name] = encoder + + +# -------------------------------------------------------------------- +# Simple display support. + + +def _show(image, **options): + from . import ImageShow + + ImageShow.show(image, **options) + + +# -------------------------------------------------------------------- +# Effects + + +def effect_mandelbrot(size, extent, quality): + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y1). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size, sigma): + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + + +def linear_gradient(mode): + """ + Generate 256x256 linear gradient from black to white, top to bottom. + + :param mode: Input mode. + """ + return Image()._new(core.linear_gradient(mode)) + + +def radial_gradient(mode): + """ + Generate 256x256 radial gradient from black to white, centre to edge. + + :param mode: Input mode. 
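+
+    For example, ``radial_gradient("L")`` returns a 256x256 greyscale image
+    shading from black at the centre to white at the edge.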
+ """ + return Image()._new(core.radial_gradient(mode)) + + +# -------------------------------------------------------------------- +# Resources + + +def _apply_env_variables(env=None): + if env is None: + env = os.environ + + for var_name, setter in [ + ("PILLOW_ALIGNMENT", core.set_alignment), + ("PILLOW_BLOCK_SIZE", core.set_block_size), + ("PILLOW_BLOCKS_MAX", core.set_blocks_max), + ]: + if var_name not in env: + continue + + var = env[var_name].lower() + + units = 1 + for postfix, mul in [("k", 1024), ("m", 1024 * 1024)]: + if var.endswith(postfix): + units = mul + var = var[: -len(postfix)] + + try: + var = int(var) * units + except ValueError: + warnings.warn(f"{var_name} is not int") + continue + + try: + setter(var) + except ValueError as e: + warnings.warn(f"{var_name}: {e}") + + +_apply_env_variables() +atexit.register(core.clear_cache) + + +class Exif(MutableMapping): + endian = None + bigtiff = False + + def __init__(self): + self._data = {} + self._ifds = {} + self._info = None + self._loaded_exif = None + + def _fixup(self, value): + try: + if len(value) == 1 and isinstance(value, tuple): + return value[0] + except Exception: + pass + return value + + def _fixup_dict(self, src_dict): + # Helper function + # returns a dict with any single item tuples/lists as individual values + return {k: self._fixup(v) for k, v in src_dict.items()} + + def _get_ifd_dict(self, offset): + try: + # an offset pointer to the location of the nested embedded IFD. + # It should be a long, but may be corrupted. + self.fp.seek(offset) + except (KeyError, TypeError): + pass + else: + from . import TiffImagePlugin + + info = TiffImagePlugin.ImageFileDirectory_v2(self.head) + info.load(self.fp) + return self._fixup_dict(info) + + def _get_head(self): + version = b"\x2B" if self.bigtiff else b"\x2A" + if self.endian == "<": + head = b"II" + version + b"\x00" + o32le(8) + else: + head = b"MM\x00" + version + o32be(8) + if self.bigtiff: + head += o32le(8) if self.endian == "<" else o32be(8) + head += b"\x00\x00\x00\x00" + return head + + def load(self, data): + # Extract EXIF information. This is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). + if data == self._loaded_exif: + return + self._loaded_exif = data + self._data.clear() + self._ifds.clear() + if data and data.startswith(b"Exif\x00\x00"): + data = data[6:] + if not data: + self._info = None + return + + self.fp = io.BytesIO(data) + self.head = self.fp.read(8) + # process dictionary + from . import TiffImagePlugin + + self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head) + self.endian = self._info._endian + self.fp.seek(self._info.next) + self._info.load(self.fp) + + def load_from_fp(self, fp, offset=None): + self._loaded_exif = None + self._data.clear() + self._ifds.clear() + + # process dictionary + from . 
import TiffImagePlugin
+
+        self.fp = fp
+        if offset is not None:
+            self.head = self._get_head()
+        else:
+            self.head = self.fp.read(8)
+        self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head)
+        if self.endian is None:
+            self.endian = self._info._endian
+        if offset is None:
+            offset = self._info.next
+        self.fp.seek(offset)
+        self._info.load(self.fp)
+
+    def _get_merged_dict(self):
+        merged_dict = dict(self)
+
+        # get EXIF extension
+        if 0x8769 in self:
+            ifd = self._get_ifd_dict(self[0x8769])
+            if ifd:
+                merged_dict.update(ifd)
+
+        # GPS
+        if 0x8825 in self:
+            merged_dict[0x8825] = self._get_ifd_dict(self[0x8825])
+
+        return merged_dict
+
+    def tobytes(self, offset=8):
+        from . import TiffImagePlugin
+
+        head = self._get_head()
+        ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head)
+        for tag, value in self.items():
+            if tag in [0x8769, 0x8225, 0x8825] and not isinstance(value, dict):
+                value = self.get_ifd(tag)
+                if (
+                    tag == 0x8769
+                    and 0xA005 in value
+                    and not isinstance(value[0xA005], dict)
+                ):
+                    value = value.copy()
+                    value[0xA005] = self.get_ifd(0xA005)
+            ifd[tag] = value
+        return b"Exif\x00\x00" + head + ifd.tobytes(offset)
+
+    def get_ifd(self, tag):
+        if tag not in self._ifds:
+            if tag in [0x8769, 0x8825]:
+                # exif, gpsinfo
+                if tag in self:
+                    self._ifds[tag] = self._get_ifd_dict(self[tag])
+            elif tag in [0xA005, 0x927C]:
+                # interop, makernote
+                if 0x8769 not in self._ifds:
+                    self.get_ifd(0x8769)
+                tag_data = self._ifds[0x8769][tag]
+                if tag == 0x927C:
+                    # makernote
+                    from .TiffImagePlugin import ImageFileDirectory_v2
+
+                    if tag_data[:8] == b"FUJIFILM":
+                        ifd_offset = i32le(tag_data, 8)
+                        ifd_data = tag_data[ifd_offset:]
+
+                        makernote = {}
+                        for i in range(0, struct.unpack("<H", ifd_data[:2])[0]):
+                            ifd_tag, typ, count, data = struct.unpack(
+                                "<HHL4s", ifd_data[i * 12 + 2 : (i + 1) * 12 + 2]
+                            )
+                            try:
+                                (
+                                    unit_size,
+                                    handler,
+                                ) = ImageFileDirectory_v2._load_dispatch[typ]
+                            except KeyError:
+                                continue
+                            size = count * unit_size
+                            if size > 4:
+                                (offset,) = struct.unpack("<L", data)
+                                data = ifd_data[offset - 12 : offset + size - 12]
+                            else:
+                                data = data[:size]
+
+                            if len(data) != size:
+                                warnings.warn(
+                                    "Possibly corrupt EXIF MakerNote data.  "
+                                    f"Expecting to read {size} bytes but only got "
+                                    f"{len(data)}. Skipping tag {ifd_tag}"
+                                )
+                                continue
+
+                            if ifd_tag in makernote:
+                                continue
+                            makernote[ifd_tag] = handler(
+                                ImageFileDirectory_v2(), data, False
+                            )
+                        self._ifds[tag] = makernote
+                    elif self.get(0x010F) == "Nintendo":
+                        makernote = {}
+                        for i in range(0, struct.unpack(">H", tag_data[:2])[0]):
+                            ifd_tag, typ, count, data = struct.unpack(
+                                ">HHL4s", tag_data[i * 12 + 2 : (i + 1) * 12 + 2]
+                            )
+                            if ifd_tag == 0x1101:
+                                # CameraInfo
+                                (offset,) = struct.unpack(">L", data)
+                                self.fp.seek(offset)
+
+                                camerainfo = {"ModelID": self.fp.read(4)}
+
+                                self.fp.read(4)
+                                # Seconds since 2000
+                                camerainfo["TimeStamp"] = i32le(self.fp.read(12))
+
+                                self.fp.read(4)
+                                camerainfo["InternalSerialNumber"] = self.fp.read(4)
+
+                                self.fp.read(12)
+                                parallax = self.fp.read(4)
+                                handler = ImageFileDirectory_v2._load_dispatch[
+                                    TiffTags.FLOAT
+                                ][1]
+                                camerainfo["Parallax"] = handler(
+                                    ImageFileDirectory_v2(), parallax, False
+                                )
+
+                                self.fp.read(4)
+                                camerainfo["Category"] = self.fp.read(2)
+
+                                makernote = {0x1101: dict(self._fixup_dict(camerainfo))}
+                        self._ifds[tag] = makernote
+                else:
+                    # interop
+                    self._ifds[tag] = self._get_ifd_dict(tag_data)
+        return self._ifds.get(tag, {})
+
+    def __str__(self):
+        if self._info is not None:
+            # Load all keys into self._data
+            for tag in self._info.keys():
+                self[tag]
+
+        return str(self._data)
+
+    def __len__(self):
+        keys = set(self._data)
+        if self._info is not None:
+            keys.update(self._info)
+        return len(keys)
+
+    def __getitem__(self, tag):
+        if self._info is not None and tag not in self._data and tag in self._info:
+            self._data[tag] = self._fixup(self._info[tag])
+            del self._info[tag]
+        return self._data[tag]
+
+    def __contains__(self, tag):
+        return tag in self._data or (self._info is not None and tag in self._info)
+
+    def __setitem__(self, tag, value):
+        if self._info is not None and tag in self._info:
+            del self._info[tag]
+        self._data[tag] = value
+
+    def __delitem__(self, tag):
+        if self._info is not None and tag in self._info:
+            del self._info[tag]
+        else:
+            del self._data[tag]
+
+    def __iter__(self):
keys = set(self._data) + if self._info is not None: + keys.update(self._info) + return iter(keys) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageChops.py b/sbsheriff/Lib/site-packages/PIL/ImageChops.py new file mode 100644 index 0000000..fec4694 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageChops.py @@ -0,0 +1,329 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard channel operations +# +# History: +# 1996-03-24 fl Created +# 1996-08-13 fl Added logical operations (for "1" images) +# 2000-10-12 fl Added offset method (from Image.py) +# +# Copyright (c) 1997-2000 by Secret Labs AB +# Copyright (c) 1996-2000 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +def constant(image, value): + """Fill a channel with a given grey level. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.new("L", image.size, value) + + +def duplicate(image): + """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return image.copy() + + +def invert(image): + """ + Invert an image (channel). + + .. code-block:: python + + out = MAX - image + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image.load() + return image._new(image.im.chop_invert()) + + +def lighter(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the lighter values. + + .. code-block:: python + + out = max(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_lighter(image2.im)) + + +def darker(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the darker values. + + .. code-block:: python + + out = min(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_darker(image2.im)) + + +def difference(image1, image2): + """ + Returns the absolute value of the pixel-by-pixel difference between the two + images. + + .. code-block:: python + + out = abs(image1 - image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_difference(image2.im)) + + +def multiply(image1, image2): + """ + Superimposes two images on top of each other. + + If you multiply an image with a solid black image, the result is black. If + you multiply with a solid white image, the image is unaffected. + + .. code-block:: python + + out = image1 * image2 / MAX + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_multiply(image2.im)) + + +def screen(image1, image2): + """ + Superimposes two inverted images on top of each other. + + .. 
code-block:: python + + out = MAX - ((MAX - image1) * (MAX - image2) / MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_screen(image2.im)) + + +def soft_light(image1, image2): + """ + Superimposes two images on top of each other using the Soft Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_soft_light(image2.im)) + + +def hard_light(image1, image2): + """ + Superimposes two images on top of each other using the Hard Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_hard_light(image2.im)) + + +def overlay(image1, image2): + """ + Superimposes two images on top of each other using the Overlay algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_overlay(image2.im)) + + +def add(image1, image2, scale=1.0, offset=0): + """ + Adds two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 + image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add(image2.im, scale, offset)) + + +def subtract(image1, image2, scale=1.0, offset=0): + """ + Subtracts two images, dividing the result by scale and adding the offset. + If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 - image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) + + +def add_modulo(image1, image2): + """Add two images, without clipping the result. + + .. code-block:: python + + out = ((image1 + image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add_modulo(image2.im)) + + +def subtract_modulo(image1, image2): + """Subtract two images, without clipping the result. + + .. code-block:: python + + out = ((image1 - image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract_modulo(image2.im)) + + +def logical_and(image1, image2): + """Logical AND between two images. + + Both of the images must have mode "1". If you would like to perform a + logical AND on an image with a mode other than "1", try + :py:meth:`~PIL.ImageChops.multiply` instead, using a black-and-white mask + as the second image. + + .. code-block:: python + + out = ((image1 and image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_and(image2.im)) + + +def logical_or(image1, image2): + """Logical OR between two images. + + Both of the images must have mode "1". + + .. code-block:: python + + out = ((image1 or image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_or(image2.im)) + + +def logical_xor(image1, image2): + """Logical XOR between two images. + + Both of the images must have mode "1". + + .. 
code-block:: python + + out = ((bool(image1) != bool(image2)) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_xor(image2.im)) + + +def blend(image1, image2, alpha): + """Blend images using constant transparency weight. Alias for + :py:func:`PIL.Image.blend`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.blend(image1, image2, alpha) + + +def composite(image1, image2, mask): + """Create composite using transparency mask. Alias for + :py:func:`PIL.Image.composite`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.composite(image1, image2, mask) + + +def offset(image, xoffset, yoffset=None): + """Returns a copy of the image where data has been offset by the given + distances. Data wraps around the edges. If ``yoffset`` is omitted, it + is assumed to be equal to ``xoffset``. + + :param image: Input image. + :param xoffset: The horizontal distance. + :param yoffset: The vertical distance. If omitted, both + distances are set to the same value. + :rtype: :py:class:`~PIL.Image.Image` + """ + + if yoffset is None: + yoffset = xoffset + image.load() + return image._new(image.im.offset(xoffset, yoffset)) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageCms.py b/sbsheriff/Lib/site-packages/PIL/ImageCms.py new file mode 100644 index 0000000..605252d --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageCms.py @@ -0,0 +1,1017 @@ +# The Python Imaging Library. +# $Id$ + +# Optional color management support, based on Kevin Cazabon's PyCMS +# library. + +# History: + +# 2009-03-08 fl Added to PIL. + +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. + +import sys +from enum import IntEnum + +from PIL import Image + +from ._deprecate import deprecate + +try: + from PIL import _imagingcms +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from ._util import DeferredError + + _imagingcms = DeferredError(ex) + +DESCRIPTION = """ +pyCMS + + a Python / PIL interface to the littleCMS ICC Color Management System + Copyright (C) 2002-2003 Kevin Cazabon + kevin@cazabon.com + https://www.cazabon.com + + pyCMS home page: https://www.cazabon.com/pyCMS + littleCMS home page: https://www.littlecms.com + (littleCMS is Copyright (C) 1998-2001 Marti Maria) + + Originally released under LGPL. Graciously donated to PIL in + March 2009, for distribution under the standard PIL license + + The pyCMS.py module provides a "clean" interface between Python/PIL and + pyCMSdll, taking care of some of the more complex handling of the direct + pyCMSdll functions, as well as error-checking and making sure that all + relevant data is kept together. + + While it is possible to call pyCMSdll functions directly, it's not highly + recommended. + + Version History: + + 1.0.0 pil Oct 2013 Port to LCMS 2. + + 0.1.0 pil mod March 10, 2009 + + Renamed display profile to proof profile. The proof + profile is the profile of the device that is being + simulated, not the profile of the device which is + actually used to display/print the final simulation + (that'd be the output profile) - also see LCMSAPI.txt + input colorspace -> using 'renderingIntent' -> proof + colorspace -> using 'proofRenderingIntent' -> output + colorspace + + Added LCMS FLAGS support. 
+ Added FLAGS["SOFTPROOFING"] as default flag for + buildProofTransform (otherwise the proof profile/intent + would be ignored). + + 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms + + 0.0.2 alpha Jan 6, 2002 + + Added try/except statements around type() checks of + potential CObjects... Python won't let you use type() + on them, and raises a TypeError (stupid, if you ask + me!) + + Added buildProofTransformFromOpenProfiles() function. + Additional fixes in DLL, see DLL code for details. + + 0.0.1 alpha first public release, Dec. 26, 2002 + + Known to-do list with current version (of Python interface, not pyCMSdll): + + none + +""" + +VERSION = "1.0.0 pil" + +# --------------------------------------------------------------------. + +core = _imagingcms + +# +# intent/direction values + + +class Intent(IntEnum): + PERCEPTUAL = 0 + RELATIVE_COLORIMETRIC = 1 + SATURATION = 2 + ABSOLUTE_COLORIMETRIC = 3 + + +class Direction(IntEnum): + INPUT = 0 + OUTPUT = 1 + PROOF = 2 + + +def __getattr__(name): + for enum, prefix in {Intent: "INTENT_", Direction: "DIRECTION_"}.items(): + if name.startswith(prefix): + name = name[len(prefix) :] + if name in enum.__members__: + deprecate(f"{prefix}{name}", 10, f"{enum.__name__}.{name}") + return enum[name] + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +# +# flags + +FLAGS = { + "MATRIXINPUT": 1, + "MATRIXOUTPUT": 2, + "MATRIXONLY": (1 | 2), + "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot + # Don't create prelinearization tables on precalculated transforms + # (internal use): + "NOPRELINEARIZATION": 16, + "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink) + "NOTCACHE": 64, # Inhibit 1-pixel cache + "NOTPRECALC": 256, + "NULLTRANSFORM": 512, # Don't transform anyway + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources + "WHITEBLACKCOMPENSATION": 8192, + "BLACKPOINTCOMPENSATION": 8192, + "GAMUTCHECK": 4096, # Out of Gamut alarm + "SOFTPROOFING": 16384, # Do softproofing + "PRESERVEBLACK": 32768, # Black preservation + "NODEFAULTRESOURCEDEF": 16777216, # CRD special + "GRIDPOINTS": lambda n: (n & 0xFF) << 16, # Gridpoints +} + +_MAX_FLAG = 0 +for flag in FLAGS.values(): + if isinstance(flag, int): + _MAX_FLAG = _MAX_FLAG | flag + + +# --------------------------------------------------------------------. +# Experimental PIL-level API +# --------------------------------------------------------------------. + +## +# Profile. 
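+# A minimal usage sketch of this API (the printer profile filename below is
+# hypothetical; any ICC profile of the matching color space would do):
+#
+#     from PIL import Image, ImageCms
+#
+#     im = Image.open("photo.jpg")  # assumed to be an RGB image
+#     srgb = ImageCms.createProfile("sRGB")
+#     cmyk = ImageCms.getOpenProfile("my_printer.icc")
+#     transform = ImageCms.buildTransform(srgb, cmyk, "RGB", "CMYK")
+#     cmyk_im = ImageCms.applyTransform(im, transform)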
+ + +class ImageCmsProfile: + def __init__(self, profile): + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + + if isinstance(profile, str): + if sys.platform == "win32": + profile_bytes_path = profile.encode() + try: + profile_bytes_path.decode("ascii") + except UnicodeDecodeError: + with open(profile, "rb") as f: + self._set(core.profile_frombytes(f.read())) + return + self._set(core.profile_open(profile), profile) + elif hasattr(profile, "read"): + self._set(core.profile_frombytes(profile.read())) + elif isinstance(profile, _imagingcms.CmsProfile): + self._set(profile) + else: + raise TypeError("Invalid type for Profile") + + def _set(self, profile, filename=None): + self.profile = profile + self.filename = filename + if profile: + self.product_name = None # profile.product_name + self.product_info = None # profile.product_info + else: + self.product_name = None + self.product_info = None + + def tobytes(self): + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + + +class ImageCmsTransform(Image.ImagePointHandler): + + """ + Transform. This can be used with the procedural API, or with the standard + :py:func:`~PIL.Image.Image.point` method. + + Will return the output profile in the ``output.info['icc_profile']``. + """ + + def __init__( + self, + input, + output, + input_mode, + output_mode, + intent=Intent.PERCEPTUAL, + proof=None, + proof_intent=Intent.ABSOLUTE_COLORIMETRIC, + flags=0, + ): + if proof is None: + self.transform = core.buildTransform( + input.profile, output.profile, input_mode, output_mode, intent, flags + ) + else: + self.transform = core.buildProofTransform( + input.profile, + output.profile, + proof.profile, + input_mode, + output_mode, + intent, + proof_intent, + flags, + ) + # Note: inputMode and outputMode are for pyCMS compatibility only + self.input_mode = self.inputMode = input_mode + self.output_mode = self.outputMode = output_mode + + self.output_profile = output + + def point(self, im): + return self.apply(im) + + def apply(self, im, imOut=None): + im.load() + if imOut is None: + imOut = Image.new(self.output_mode, im.size, None) + self.transform.apply(im.im.id, imOut.im.id) + imOut.info["icc_profile"] = self.output_profile.tobytes() + return imOut + + def apply_in_place(self, im): + im.load() + if im.mode != self.output_mode: + raise ValueError("mode mismatch") # wrong output mode + self.transform.apply(im.im.id, im.im.id) + im.info["icc_profile"] = self.output_profile.tobytes() + return im + + +def get_display_profile(handle=None): + """ + (experimental) Fetches the profile for the current display device. + + :returns: ``None`` if the profile is not known. + """ + + if sys.platform != "win32": + return None + + from PIL import ImageWin + + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(handle, 1) + else: + profile = core.get_display_profile_win32(handle or 0) + if profile is None: + return None + return ImageCmsProfile(profile) + + +# --------------------------------------------------------------------. +# pyCMS compatible layer +# --------------------------------------------------------------------. + + +class PyCMSError(Exception): + + """(pyCMS) Exception class. 
+ This is used for all errors in the pyCMS API.""" + + pass + + +def profileToProfile( + im, + inputProfile, + outputProfile, + renderingIntent=Intent.PERCEPTUAL, + outputMode=None, + inPlace=False, + flags=0, +): + """ + (pyCMS) Applies an ICC transformation to a given image, mapping from + ``inputProfile`` to ``outputProfile``. + + If the input or output profiles specified are not valid filenames, a + :exc:`PyCMSError` will be raised. If ``inPlace`` is ``True`` and + ``outputMode != im.mode``, a :exc:`PyCMSError` will be raised. + If an error occurs during application of the profiles, + a :exc:`PyCMSError` will be raised. + If ``outputMode`` is not a mode supported by the ``outputProfile`` (or by pyCMS), + a :exc:`PyCMSError` will be raised. + + This function applies an ICC transformation to im from ``inputProfile``'s + color space to ``outputProfile``'s color space using the specified rendering + intent to decide how to handle out-of-gamut colors. + + ``outputMode`` can be used to specify that a color mode conversion is to + be done using these profiles, but the specified profiles must be able + to handle that mode. I.e., if converting im from RGB to CMYK using + profiles, the input profile must handle RGB data, and the output + profile must handle CMYK data. + + :param im: An open :py:class:`~PIL.Image.Image` object (i.e. Image.new(...) + or Image.open(...), etc.) + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this image, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this image, or a profile object + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT) + ImageCms.Intent.RELATIVE_COLORIMETRIC = 1 + ImageCms.Intent.SATURATION = 2 + ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param outputMode: A valid PIL mode for the output image (i.e. "RGB", + "CMYK", etc.). Note: if rendering the image "inPlace", outputMode + MUST be the same mode as the input, or omitted completely. If + omitted, the outputMode will be the same as the mode of the input + image (im.mode) + :param inPlace: Boolean. If ``True``, the original image is modified in-place, + and ``None`` is returned. If ``False`` (default), a new + :py:class:`~PIL.Image.Image` object is returned with the transform applied. + :param flags: Integer (0-...) 
specifying additional flags
+    :returns: Either None or a new :py:class:`~PIL.Image.Image` object, depending on
+        the value of ``inPlace``
+    :exception PyCMSError:
+    """
+
+    if outputMode is None:
+        outputMode = im.mode
+
+    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
+
+    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+        raise PyCMSError(f"flags must be an integer between 0 and {_MAX_FLAG}")
+
+    try:
+        if not isinstance(inputProfile, ImageCmsProfile):
+            inputProfile = ImageCmsProfile(inputProfile)
+        if not isinstance(outputProfile, ImageCmsProfile):
+            outputProfile = ImageCmsProfile(outputProfile)
+        transform = ImageCmsTransform(
+            inputProfile,
+            outputProfile,
+            im.mode,
+            outputMode,
+            renderingIntent,
+            flags=flags,
+        )
+        if inPlace:
+            transform.apply_in_place(im)
+            imOut = None
+        else:
+            imOut = transform.apply(im)
+    except (OSError, TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+    return imOut
+
+
+def getOpenProfile(profileFilename):
+    """
+    (pyCMS) Opens an ICC profile file.
+
+    The PyCMSProfile object can be passed back into pyCMS for use in creating
+    transforms and such (as in ImageCms.buildTransformFromOpenProfiles()).
+
+    If ``profileFilename`` is not a valid filename for an ICC profile,
+    a :exc:`PyCMSError` will be raised.
+
+    :param profileFilename: String, as a valid filename path to the ICC profile
+        you wish to open, or a file-like object.
+    :returns: A CmsProfile class object.
+    :exception PyCMSError:
+    """
+
+    try:
+        return ImageCmsProfile(profileFilename)
+    except (OSError, TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+
+def buildTransform(
+    inputProfile,
+    outputProfile,
+    inMode,
+    outMode,
+    renderingIntent=Intent.PERCEPTUAL,
+    flags=0,
+):
+    """
+    (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the
+    ``outputProfile``. Use applyTransform to apply the transform to a given
+    image.
+
+    If the input or output profiles specified are not valid filenames, a
+    :exc:`PyCMSError` will be raised. If an error occurs during creation
+    of the transform, a :exc:`PyCMSError` will be raised.
+
+    If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile``
+    (or by pyCMS), a :exc:`PyCMSError` will be raised.
+
+    This function builds and returns an ICC transform from the ``inputProfile``
+    to the ``outputProfile`` using the ``renderingIntent`` to determine what to do
+    with out-of-gamut colors. It will ONLY work for converting images that
+    are in ``inMode`` to images that are in ``outMode`` color format (PIL mode,
+    i.e. "RGB", "RGBA", "CMYK", etc.).
+
+    Building the transform is a fair part of the overhead in
+    ImageCms.profileToProfile(), so if you're planning on converting multiple
+    images using the same input/output settings, this can save you time.
+    Once you have a transform object, it can be used with
+    ImageCms.applyTransform() to convert images without the need to re-compute
+    the lookup table for the transform.
+
+    The reason pyCMS returns a class object rather than a handle directly
+    to the transform is that it needs to keep track of the PIL input/output
+    modes that the transform is meant for. These attributes are stored in
+    the ``inMode`` and ``outMode`` attributes of the object (which can be
+    manually overridden if you really want to, but I don't know of any
+    time that would be of use, or would even work).
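+
+    For example (a sketch; the profile filenames are hypothetical and
+    ``rgb_im`` is any open RGB image)::
+
+        rgb2cmyk = ImageCms.buildTransform(
+            "sRGB.icc", "printer_cmyk.icc", "RGB", "CMYK"
+        )
+        cmyk_im = ImageCms.applyTransform(rgb_im, rgb2cmyk)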
+
+    :param inputProfile: String, as a valid filename path to the ICC input
+        profile you wish to use for this transform, or a profile object
+    :param outputProfile: String, as a valid filename path to the ICC output
+        profile you wish to use for this transform, or a profile object
+    :param inMode: String, as a valid PIL mode that the appropriate profile
+        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+    :param outMode: String, as a valid PIL mode that the appropriate profile
+        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+    :param renderingIntent: Integer (0-3) specifying the rendering intent you
+        wish to use for the transform
+
+            ImageCms.Intent.PERCEPTUAL            = 0 (DEFAULT)
+            ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+            ImageCms.Intent.SATURATION            = 2
+            ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+        see the pyCMS documentation for details on rendering intents and what
+        they do.
+    :param flags: Integer (0-...) specifying additional flags
+    :returns: A CmsTransform class object.
+    :exception PyCMSError:
+    """
+
+    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
+
+    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+        raise PyCMSError(f"flags must be an integer between 0 and {_MAX_FLAG}")
+
+    try:
+        if not isinstance(inputProfile, ImageCmsProfile):
+            inputProfile = ImageCmsProfile(inputProfile)
+        if not isinstance(outputProfile, ImageCmsProfile):
+            outputProfile = ImageCmsProfile(outputProfile)
+        return ImageCmsTransform(
+            inputProfile, outputProfile, inMode, outMode, renderingIntent, flags=flags
+        )
+    except (OSError, TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+
+def buildProofTransform(
+    inputProfile,
+    outputProfile,
+    proofProfile,
+    inMode,
+    outMode,
+    renderingIntent=Intent.PERCEPTUAL,
+    proofRenderingIntent=Intent.ABSOLUTE_COLORIMETRIC,
+    flags=FLAGS["SOFTPROOFING"],
+):
+    """
+    (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the
+    ``outputProfile``, but tries to simulate the result that would be
+    obtained on the ``proofProfile`` device.
+
+    If the input, output, or proof profiles specified are not valid
+    filenames, a :exc:`PyCMSError` will be raised.
+
+    If an error occurs during creation of the transform,
+    a :exc:`PyCMSError` will be raised.
+
+    If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile``
+    (or by pyCMS), a :exc:`PyCMSError` will be raised.
+
+    This function builds and returns an ICC transform from the ``inputProfile``
+    to the ``outputProfile``, but tries to simulate the result that would be
+    obtained on the ``proofProfile`` device using ``renderingIntent`` and
+    ``proofRenderingIntent`` to determine what to do with out-of-gamut
+    colors. This is known as "soft-proofing". It will ONLY work for
+    converting images that are in ``inMode`` to images that are in ``outMode``
+    color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.).
+
+    Usage of the resulting transform object is exactly the same as with
+    ImageCms.buildTransform().
+
+    Proof profiling is generally used when using an output device to get a
+    good idea of what the final printed/displayed image would look like on
+    the ``proofProfile`` device when it's quicker and easier to use the
+    output device for judging color. Generally, this means that the
+    output device is a monitor, or a dye-sub printer (etc.), and the simulated
+    device is something more expensive, complicated, or time consuming
+    (making it difficult to make a real print for color judgement purposes).
+
+    Soft-proofing basically functions by adjusting the colors on the
+    output device to match the colors of the device being simulated. However,
+    when the simulated device has a much wider gamut than the output
+    device, you may obtain marginal results.
+
+    :param inputProfile: String, as a valid filename path to the ICC input
+        profile you wish to use for this transform, or a profile object
+    :param outputProfile: String, as a valid filename path to the ICC output
+        (monitor, usually) profile you wish to use for this transform, or a
+        profile object
+    :param proofProfile: String, as a valid filename path to the ICC proof
+        profile you wish to use for this transform, or a profile object
+    :param inMode: String, as a valid PIL mode that the appropriate profile
+        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+    :param outMode: String, as a valid PIL mode that the appropriate profile
+        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+    :param renderingIntent: Integer (0-3) specifying the rendering intent you
+        wish to use for the input->proof (simulated) transform
+
+            ImageCms.Intent.PERCEPTUAL            = 0 (DEFAULT)
+            ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+            ImageCms.Intent.SATURATION            = 2
+            ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+        see the pyCMS documentation for details on rendering intents and what
+        they do.
+    :param proofRenderingIntent: Integer (0-3) specifying the rendering intent
+        you wish to use for proof->output transform
+
+            ImageCms.Intent.PERCEPTUAL            = 0 (DEFAULT)
+            ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+            ImageCms.Intent.SATURATION            = 2
+            ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+        see the pyCMS documentation for details on rendering intents and what
+        they do.
+    :param flags: Integer (0-...) specifying additional flags
+    :returns: A CmsTransform class object.
+    :exception PyCMSError:
+    """
+
+    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
+
+    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+        raise PyCMSError(f"flags must be an integer between 0 and {_MAX_FLAG}")
+
+    try:
+        if not isinstance(inputProfile, ImageCmsProfile):
+            inputProfile = ImageCmsProfile(inputProfile)
+        if not isinstance(outputProfile, ImageCmsProfile):
+            outputProfile = ImageCmsProfile(outputProfile)
+        if not isinstance(proofProfile, ImageCmsProfile):
+            proofProfile = ImageCmsProfile(proofProfile)
+        return ImageCmsTransform(
+            inputProfile,
+            outputProfile,
+            inMode,
+            outMode,
+            renderingIntent,
+            proofProfile,
+            proofRenderingIntent,
+            flags,
+        )
+    except (OSError, TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+
+buildTransformFromOpenProfiles = buildTransform
+buildProofTransformFromOpenProfiles = buildProofTransform
+
+
+def applyTransform(im, transform, inPlace=False):
+    """
+    (pyCMS) Applies a transform to a given image.
+
+    If ``im.mode != transform.inMode``, a :exc:`PyCMSError` is raised.
+
+    If ``inPlace`` is ``True`` and ``transform.inMode != transform.outMode``, a
+    :exc:`PyCMSError` is raised.
+
+    If ``im.mode``, ``transform.inMode`` or ``transform.outMode`` is not
+    supported by pyCMSdll or the profiles you used for the transform, a
+    :exc:`PyCMSError` is raised.
+
+    If an error occurs while the transform is being applied,
+    a :exc:`PyCMSError` is raised.
+
+    This function applies a pre-calculated transform (from
+    ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles())
+    to an image. The transform can be used for multiple images, saving
+    considerable calculation time if doing the same conversion multiple times.
+
+    If you want to modify im in-place instead of receiving a new image as
+    the return value, set ``inPlace`` to ``True``. This can only be done if
+    ``transform.inMode`` and ``transform.outMode`` are the same, because we can't
+    change the mode in-place (the buffer sizes for some modes are
+    different). The default behavior is to return a new :py:class:`~PIL.Image.Image`
+    object of the same dimensions in mode ``transform.outMode``.
+
+    :param im: An :py:class:`~PIL.Image.Image` object, and im.mode must be the same
+        as the ``inMode`` supported by the transform.
+    :param transform: A valid CmsTransform class object
+    :param inPlace: Bool. If ``True``, ``im`` is modified in place and ``None`` is
+        returned, if ``False``, a new :py:class:`~PIL.Image.Image` object with the
+        transform applied is returned (and ``im`` is not changed). The default is
+        ``False``.
+    :returns: Either ``None``, or a new :py:class:`~PIL.Image.Image` object,
+        depending on the value of ``inPlace``. The profile will be returned in
+        the image's ``info['icc_profile']``.
+    :exception PyCMSError:
+    """
+
+    try:
+        if inPlace:
+            transform.apply_in_place(im)
+            imOut = None
+        else:
+            imOut = transform.apply(im)
+    except (TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+    return imOut
+
+
+def createProfile(colorSpace, colorTemp=-1):
+    """
+    (pyCMS) Creates a profile.
+
+    If colorSpace not in ``["LAB", "XYZ", "sRGB"]``,
+    a :exc:`PyCMSError` is raised.
+
+    If using LAB and ``colorTemp`` is not a positive integer,
+    a :exc:`PyCMSError` is raised.
+
+    If an error occurs while creating the profile,
+    a :exc:`PyCMSError` is raised.
+
+    Use this function to create common profiles on-the-fly instead of
+    having to supply a profile on disk and knowing the path to it. It
+    returns a normal CmsProfile object that can be passed to
+    ImageCms.buildTransformFromOpenProfiles() to create a transform to apply
+    to images.
+
+    :param colorSpace: String, the color space of the profile you wish to
+        create.
+        Currently only "LAB", "XYZ", and "sRGB" are supported.
+    :param colorTemp: Positive integer for the white point for the profile, in
+        degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50
+        illuminant if omitted (5000K). colorTemp is ONLY applied to LAB
+        profiles, and is ignored for XYZ and sRGB.
+    :returns: A CmsProfile class object
+    :exception PyCMSError:
+    """
+
+    if colorSpace not in ["LAB", "XYZ", "sRGB"]:
+        raise PyCMSError(
+            f"Color space not supported for on-the-fly profile creation ({colorSpace})"
+        )
+
+    if colorSpace == "LAB":
+        try:
+            colorTemp = float(colorTemp)
+        except (TypeError, ValueError) as e:
+            raise PyCMSError(
+                f'Color temperature must be numeric, "{colorTemp}" not valid'
+            ) from e
+
+    try:
+        return core.createProfile(colorSpace, colorTemp)
+    except (TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+
+def getProfileName(profile):
+    """
+    (pyCMS) Gets the internal product name for the given profile.
+
+    If ``profile`` isn't a valid CmsProfile object or filename to a profile,
+    a :exc:`PyCMSError` is raised. If an error occurs while trying
+    to obtain the name tag, a :exc:`PyCMSError` is raised.
+ + Use this function to obtain the INTERNAL name of the profile (stored + in an ICC tag in the profile itself), usually the one used when the + profile was originally created. Sometimes this tag also contains + additional information supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal name of the profile as stored + in an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # do it in python, not c. + # // name was "%s - %s" (model, manufacturer) || Description , + # // but if the Model and Manufacturer were the same or the model + # // was long, Just the model, in 1.x + model = profile.profile.model + manufacturer = profile.profile.manufacturer + + if not (model or manufacturer): + return (profile.profile.profile_description or "") + "\n" + if not manufacturer or len(model) > 30: + return model + "\n" + return f"{model} - {manufacturer}\n" + + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileInfo(profile): + """ + (pyCMS) Gets the internal product information for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, + a :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the info tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + info tag. This often contains details about the profile, and how it + was created, as supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # add an extra newline to preserve pyCMS compatibility + # Python, not C. the white point bits weren't working well, + # so skipping. + # info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.profile_description + cpright = profile.profile.copyright + arr = [] + for elt in (description, cpright): + if elt: + arr.append(elt) + return "\r\n\r\n".join(arr) + "\r\n\r\n" + + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileCopyright(profile): + """ + (pyCMS) Gets the copyright for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the copyright tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + copyright tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. 
+ :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.copyright or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileManufacturer(profile): + """ + (pyCMS) Gets the manufacturer for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the manufacturer tag, a + :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + manufacturer tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.manufacturer or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileModel(profile): + """ + (pyCMS) Gets the model for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the model tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + model tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.model or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileDescription(profile): + """ + (pyCMS) Gets the description for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the description tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + description tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in an + ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.profile_description or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getDefaultIntent(profile): + """ + (pyCMS) Gets the default intent name for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the default intent, a + :exc:`PyCMSError` is raised. + + Use this function to determine the default (and usually best optimized) + rendering intent for this profile. 
Most profiles support multiple
+    rendering intents, but are intended mostly for one type of conversion.
+    If you wish to use a different intent than returned, use
+    ImageCms.isIntentSupported() to verify it will work first.
+
+    :param profile: EITHER a valid CmsProfile object, OR a string of the
+        filename of an ICC profile.
+    :returns: Integer 0-3 specifying the default rendering intent for this
+        profile.
+
+            ImageCms.Intent.PERCEPTUAL            = 0 (DEFAULT)
+            ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+            ImageCms.Intent.SATURATION            = 2
+            ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+        see the pyCMS documentation for details on rendering intents and what
+        they do.
+    :exception PyCMSError:
+    """
+
+    try:
+        if not isinstance(profile, ImageCmsProfile):
+            profile = ImageCmsProfile(profile)
+        return profile.profile.rendering_intent
+    except (AttributeError, OSError, TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+
+def isIntentSupported(profile, intent, direction):
+    """
+    (pyCMS) Checks if a given intent is supported.
+
+    Use this function to verify that you can use your desired
+    ``intent`` with ``profile``, and that ``profile`` can be used for the
+    input/output/proof profile as you desire.
+
+    Some profiles are created specifically for one "direction", and cannot
+    be used for others. Some profiles can only be used for certain
+    rendering intents, so it's best to either verify this before trying
+    to create a transform with them (using this function), or catch the
+    potential :exc:`PyCMSError` that will occur if they don't
+    support the modes you select.
+
+    :param profile: EITHER a valid CmsProfile object, OR a string of the
+        filename of an ICC profile.
+    :param intent: Integer (0-3) specifying the rendering intent you wish to
+        use with this profile
+
+            ImageCms.Intent.PERCEPTUAL            = 0 (DEFAULT)
+            ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+            ImageCms.Intent.SATURATION            = 2
+            ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+        see the pyCMS documentation for details on rendering intents and what
+        they do.
+    :param direction: Integer specifying if the profile is to be used for
+        input, output, or proof
+
+            INPUT  = 0 (or use ImageCms.Direction.INPUT)
+            OUTPUT = 1 (or use ImageCms.Direction.OUTPUT)
+            PROOF  = 2 (or use ImageCms.Direction.PROOF)
+
+    :returns: 1 if the intent/direction are supported, -1 if they are not.
+    :exception PyCMSError:
+    """
+
+    try:
+        if not isinstance(profile, ImageCmsProfile):
+            profile = ImageCmsProfile(profile)
+        # FIXME: I get different results for the same data w. different
+        # compilers. Bug in LittleCMS or in the binding?
+        if profile.profile.is_intent_supported(intent, direction):
+            return 1
+        else:
+            return -1
+    except (AttributeError, OSError, TypeError, ValueError) as v:
+        raise PyCMSError(v) from v
+
+
+def versions():
+    """
+    (pyCMS) Fetches versions.
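+
+    :returns: A 4-tuple of version strings: the pyCMS wrapper version, the
+        LittleCMS version, the Python interpreter version and the Pillow
+        version (the same tuple built by the return statement below).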
+ """ + + return VERSION, core.littlecms_version, sys.version.split()[0], Image.__version__ diff --git a/sbsheriff/Lib/site-packages/PIL/ImageColor.py b/sbsheriff/Lib/site-packages/PIL/ImageColor.py new file mode 100644 index 0000000..9cbce41 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageColor.py @@ -0,0 +1,303 @@ +# +# The Python Imaging Library +# $Id$ +# +# map CSS3-style colour description strings to RGB +# +# History: +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-15 fl Added RGBA support +# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2 +# 2004-07-19 fl Fixed gray/grey spelling issues +# 2009-03-05 fl Fixed rounding error in grayscale calculation +# +# Copyright (c) 2002-2004 by Secret Labs AB +# Copyright (c) 2002-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re + +from . import Image + + +def getrgb(color): + """ + Convert a color string to an RGB or RGBA tuple. If the string cannot be + parsed, this function raises a :py:exc:`ValueError` exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :return: ``(red, green, blue[, alpha])`` + """ + if len(color) > 100: + raise ValueError("color specifier is too long") + color = color.lower() + + rgb = colormap.get(color, None) + if rgb: + if isinstance(rgb, tuple): + return rgb + colormap[color] = rgb = getrgb(rgb) + return rgb + + # check for known string formats + if re.match("#[a-f0-9]{3}$", color): + return int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16) + + if re.match("#[a-f0-9]{4}$", color): + return ( + int(color[1] * 2, 16), + int(color[2] * 2, 16), + int(color[3] * 2, 16), + int(color[4] * 2, 16), + ) + + if re.match("#[a-f0-9]{6}$", color): + return int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16) + + if re.match("#[a-f0-9]{8}$", color): + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16), + int(color[7:9], 16), + ) + + m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return int(m.group(1)), int(m.group(2)), int(m.group(3)) + + m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + return ( + int((int(m.group(1)) * 255) / 100.0 + 0.5), + int((int(m.group(2)) * 255) / 100.0 + 0.5), + int((int(m.group(3)) * 255) / 100.0 + 0.5), + ) + + m = re.match( + r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) + if m: + from colorsys import hls_to_rgb + + rgb = hls_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(3)) / 100.0, + float(m.group(2)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5), + ) + + m = re.match( + r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) + if m: + from colorsys import hsv_to_rgb + + rgb = hsv_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(2)) / 100.0, + float(m.group(3)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5), + ) + + m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4)) + raise ValueError(f"unknown color specifier: {repr(color)}") + + +def getcolor(color, mode): + """ + Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a + greyscale value if ``mode`` is not color or a palette image. 
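+    For example, ``getcolor("red", "RGB")`` gives ``(255, 0, 0)``, while
+    ``getcolor("red", "L")`` gives ``76``, the ITU-R 601-2 luma of pure
+    red as computed in the function body below.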
If the string + cannot be parsed, this function raises a :py:exc:`ValueError` exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :param mode: Convert result to this mode + :return: ``(graylevel[, alpha]) or (red, green, blue[, alpha])`` + """ + # same as getrgb, but converts the result to the given mode + color, alpha = getrgb(color), 255 + if len(color) == 4: + color, alpha = color[:3], color[3] + + if Image.getmodebase(mode) == "L": + r, g, b = color + # ITU-R Recommendation 601-2 for nonlinear RGB + # scaled to 24 bits to match the convert's implementation. + color = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16 + if mode[-1] == "A": + return color, alpha + else: + if mode[-1] == "A": + return color + (alpha,) + return color + + +colormap = { + # X11 colour table from https://drafts.csswg.org/css-color-4/, with + # gray/grey spelling issues fixed. This is a superset of HTML 4.0 + # colour names used in CSS 1. + "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgrey": "#a9a9a9", + "darkgreen": "#006400", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + "darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "grey": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgreen": "#90ee90", + "lightgray": "#d3d3d3", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", + "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + 
"mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": "#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + "rebeccapurple": "#663399", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + "steelblue": "#4682b4", + "tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", +} diff --git a/sbsheriff/Lib/site-packages/PIL/ImageDraw.py b/sbsheriff/Lib/site-packages/PIL/ImageDraw.py new file mode 100644 index 0000000..ff94f0c --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageDraw.py @@ -0,0 +1,1058 @@ +# +# The Python Imaging Library +# $Id$ +# +# drawing interface operations +# +# History: +# 1996-04-13 fl Created (experimental) +# 1996-08-07 fl Filled polygons, ellipses. +# 1996-08-13 fl Added text support +# 1998-06-28 fl Handle I and F images +# 1998-12-29 fl Added arc; use arc primitive to draw ellipses +# 1999-01-10 fl Added shape stuff (experimental) +# 1999-02-06 fl Added bitmap support +# 1999-02-11 fl Changed all primitives to take options +# 1999-02-20 fl Fixed backwards compatibility +# 2000-10-12 fl Copy on write, when necessary +# 2001-02-18 fl Use default ink for bitmap/text also in fill mode +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing +# 2002-12-11 fl Refactored low-level drawing API (work in progress) +# 2004-08-26 fl Made Draw() a factory function, added getdraw() support +# 2004-09-04 fl Added width support to line primitive +# 2004-09-10 fl Added font mode handling +# 2006-06-19 fl Added font bearing support (getmask2) +# +# Copyright (c) 1997-2006 by Secret Labs AB +# Copyright (c) 1996-2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import math +import numbers +import warnings + +from . import Image, ImageColor +from ._deprecate import deprecate + +""" +A simple 2D drawing interface for PIL images. +

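+A minimal sketch of typical use (any RGB image would do)::
+
+    from PIL import Image, ImageDraw
+
+    im = Image.new("RGB", (200, 100), "white")
+    d = ImageDraw.Draw(im)
+    d.rectangle((10, 10, 190, 90), outline="black", width=2)
+    d.text((20, 40), "hello", fill="black")
+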
+Application code should use the Draw factory, instead of
+instantiating ImageDraw directly.
+"""
+
+
+class ImageDraw:
+    font = None
+
+    def __init__(self, im, mode=None):
+        """
+        Create a drawing instance.
+
+        :param im: The image to draw in.
+        :param mode: Optional mode to use for color values. For RGB
+            images, this argument can be RGB or RGBA (to blend the
+            drawing into the image). For all other modes, this argument
+            must be the same as the image mode. If omitted, the mode
+            defaults to the mode of the image.
+        """
+        im.load()
+        if im.readonly:
+            im._copy()  # make it writeable
+        blend = 0
+        if mode is None:
+            mode = im.mode
+        if mode != im.mode:
+            if mode == "RGBA" and im.mode == "RGB":
+                blend = 1
+            else:
+                raise ValueError("mode mismatch")
+        if mode == "P":
+            self.palette = im.palette
+        else:
+            self.palette = None
+        self._image = im
+        self.im = im.im
+        self.draw = Image.core.draw(self.im, blend)
+        self.mode = mode
+        if mode in ("I", "F"):
+            self.ink = self.draw.draw_ink(1)
+        else:
+            self.ink = self.draw.draw_ink(-1)
+        if mode in ("1", "P", "I", "F"):
+            # FIXME: fix Fill2 to properly support matte for I+F images
+            self.fontmode = "1"
+        else:
+            self.fontmode = "L"  # aliasing is okay for other modes
+        self.fill = False
+
+    def getfont(self):
+        """
+        Get the current default font.
+
+        To set the default font for this ImageDraw instance::
+
+            from PIL import ImageDraw, ImageFont
+            draw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf")
+
+        To set the default font for all future ImageDraw instances::
+
+            from PIL import ImageDraw, ImageFont
+            ImageDraw.ImageDraw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf")
+
+        If the current default font is ``None``,
+        it is initialized with ``ImageFont.load_default()``.
+
+        :returns: An image font."""
+        if not self.font:
+            # FIXME: should add a font repository
+            from .
import ImageFont + + self.font = ImageFont.load_default() + return self.font + + def _getink(self, ink, fill=None): + if ink is None and fill is None: + if self.fill: + fill = self.ink + else: + ink = self.ink + else: + if ink is not None: + if isinstance(ink, str): + ink = ImageColor.getcolor(ink, self.mode) + if self.palette and not isinstance(ink, numbers.Number): + ink = self.palette.getcolor(ink, self._image) + ink = self.draw.draw_ink(ink) + if fill is not None: + if isinstance(fill, str): + fill = ImageColor.getcolor(fill, self.mode) + if self.palette and not isinstance(fill, numbers.Number): + fill = self.palette.getcolor(fill, self._image) + fill = self.draw.draw_ink(fill) + return ink, fill + + def arc(self, xy, start, end, fill=None, width=1): + """Draw an arc.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_arc(xy, start, end, ink, width) + + def bitmap(self, xy, bitmap, fill=None): + """Draw a bitmap.""" + bitmap.load() + ink, fill = self._getink(fill) + if ink is None: + ink = fill + if ink is not None: + self.draw.draw_bitmap(xy, bitmap.im, ink) + + def chord(self, xy, start, end, fill=None, outline=None, width=1): + """Draw a chord.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_chord(xy, start, end, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_chord(xy, start, end, ink, 0, width) + + def ellipse(self, xy, fill=None, outline=None, width=1): + """Draw an ellipse.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_ellipse(xy, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_ellipse(xy, ink, 0, width) + + def line(self, xy, fill=None, width=0, joint=None): + """Draw a line, or a connected sequence of line segments.""" + ink = self._getink(fill)[0] + if ink is not None: + self.draw.draw_lines(xy, ink, width) + if joint == "curve" and width > 4: + if not isinstance(xy[0], (list, tuple)): + xy = [tuple(xy[i : i + 2]) for i in range(0, len(xy), 2)] + for i in range(1, len(xy) - 1): + point = xy[i] + angles = [ + math.degrees(math.atan2(end[0] - start[0], start[1] - end[1])) + % 360 + for start, end in ((xy[i - 1], point), (point, xy[i + 1])) + ] + if angles[0] == angles[1]: + # This is a straight line, so no joint is required + continue + + def coord_at_angle(coord, angle): + x, y = coord + angle -= 90 + distance = width / 2 - 1 + return tuple( + p + (math.floor(p_d) if p_d > 0 else math.ceil(p_d)) + for p, p_d in ( + (x, distance * math.cos(math.radians(angle))), + (y, distance * math.sin(math.radians(angle))), + ) + ) + + flipped = ( + angles[1] > angles[0] and angles[1] - 180 > angles[0] + ) or (angles[1] < angles[0] and angles[1] + 180 > angles[0]) + coords = [ + (point[0] - width / 2 + 1, point[1] - width / 2 + 1), + (point[0] + width / 2 - 1, point[1] + width / 2 - 1), + ] + if flipped: + start, end = (angles[1] + 90, angles[0] + 90) + else: + start, end = (angles[0] - 90, angles[1] - 90) + self.pieslice(coords, start - 90, end - 90, fill) + + if width > 8: + # Cover potential gaps between the line and the joint + if flipped: + gap_coords = [ + coord_at_angle(point, angles[0] + 90), + point, + coord_at_angle(point, angles[1] + 90), + ] + else: + gap_coords = [ + coord_at_angle(point, angles[0] - 90), + point, + coord_at_angle(point, angles[1] - 90), + ] + self.line(gap_coords, fill, width=3) + + def shape(self, shape, fill=None, outline=None): + """(Experimental) Draw a shape.""" + shape.close() + ink, fill = 
self._getink(outline, fill) + if fill is not None: + self.draw.draw_outline(shape, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_outline(shape, ink, 0) + + def pieslice(self, xy, start, end, fill=None, outline=None, width=1): + """Draw a pieslice.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_pieslice(xy, start, end, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_pieslice(xy, start, end, ink, 0, width) + + def point(self, xy, fill=None): + """Draw one or more individual pixels.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_points(xy, ink) + + def polygon(self, xy, fill=None, outline=None, width=1): + """Draw a polygon.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_polygon(xy, fill, 1) + if ink is not None and ink != fill and width != 0: + if width == 1: + self.draw.draw_polygon(xy, ink, 0, width) + else: + # To avoid expanding the polygon outwards, + # use the fill as a mask + mask = Image.new("1", self.im.size) + mask_ink = self._getink(1)[0] + + fill_im = mask.copy() + draw = Draw(fill_im) + draw.draw.draw_polygon(xy, mask_ink, 1) + + ink_im = mask.copy() + draw = Draw(ink_im) + width = width * 2 - 1 + draw.draw.draw_polygon(xy, mask_ink, 0, width) + + mask.paste(ink_im, mask=fill_im) + + im = Image.new(self.mode, self.im.size) + draw = Draw(im) + draw.draw.draw_polygon(xy, ink, 0, width) + self.im.paste(im.im, (0, 0) + im.size, mask.im) + + def regular_polygon( + self, bounding_circle, n_sides, rotation=0, fill=None, outline=None + ): + """Draw a regular polygon.""" + xy = _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation) + self.polygon(xy, fill, outline) + + def rectangle(self, xy, fill=None, outline=None, width=1): + """Draw a rectangle.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_rectangle(xy, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_rectangle(xy, ink, 0, width) + + def rounded_rectangle(self, xy, radius=0, fill=None, outline=None, width=1): + """Draw a rounded rectangle.""" + if isinstance(xy[0], (list, tuple)): + (x0, y0), (x1, y1) = xy + else: + x0, y0, x1, y1 = xy + + d = radius * 2 + + full_x = d >= x1 - x0 + if full_x: + # The two left and two right corners are joined + d = x1 - x0 + full_y = d >= y1 - y0 + if full_y: + # The two top and two bottom corners are joined + d = y1 - y0 + if full_x and full_y: + # If all corners are joined, that is a circle + return self.ellipse(xy, fill, outline, width) + + if d == 0: + # If the corners have no curve, that is a rectangle + return self.rectangle(xy, fill, outline, width) + + r = d // 2 + ink, fill = self._getink(outline, fill) + + def draw_corners(pieslice): + if full_x: + # Draw top and bottom halves + parts = ( + ((x0, y0, x0 + d, y0 + d), 180, 360), + ((x0, y1 - d, x0 + d, y1), 0, 180), + ) + elif full_y: + # Draw left and right halves + parts = ( + ((x0, y0, x0 + d, y0 + d), 90, 270), + ((x1 - d, y0, x1, y0 + d), 270, 90), + ) + else: + # Draw four separate corners + parts = ( + ((x1 - d, y0, x1, y0 + d), 270, 360), + ((x1 - d, y1 - d, x1, y1), 0, 90), + ((x0, y1 - d, x0 + d, y1), 90, 180), + ((x0, y0, x0 + d, y0 + d), 180, 270), + ) + for part in parts: + if pieslice: + self.draw.draw_pieslice(*(part + (fill, 1))) + else: + self.draw.draw_arc(*(part + (ink, width))) + + if fill is not None: + draw_corners(True) + + if full_x: + self.draw.draw_rectangle((x0, y0 + r + 1, x1, y1 - 
r - 1), fill, 1)
+            else:
+                self.draw.draw_rectangle((x0 + r + 1, y0, x1 - r - 1, y1), fill, 1)
+            if not full_x and not full_y:
+                self.draw.draw_rectangle((x0, y0 + r + 1, x0 + r, y1 - r - 1), fill, 1)
+                self.draw.draw_rectangle((x1 - r, y0 + r + 1, x1, y1 - r - 1), fill, 1)
+        if ink is not None and ink != fill and width != 0:
+            draw_corners(False)
+
+            if not full_x:
+                self.draw.draw_rectangle(
+                    (x0 + r + 1, y0, x1 - r - 1, y0 + width - 1), ink, 1
+                )
+                self.draw.draw_rectangle(
+                    (x0 + r + 1, y1 - width + 1, x1 - r - 1, y1), ink, 1
+                )
+            if not full_y:
+                self.draw.draw_rectangle(
+                    (x0, y0 + r + 1, x0 + width - 1, y1 - r - 1), ink, 1
+                )
+                self.draw.draw_rectangle(
+                    (x1 - width + 1, y0 + r + 1, x1, y1 - r - 1), ink, 1
+                )
+
+    def _multiline_check(self, text):
+        split_character = "\n" if isinstance(text, str) else b"\n"
+
+        return split_character in text
+
+    def _multiline_split(self, text):
+        split_character = "\n" if isinstance(text, str) else b"\n"
+
+        return text.split(split_character)
+
+    def _multiline_spacing(self, font, spacing, stroke_width):
+        # this can be replaced with self.textbbox(...)[3] when textsize is removed
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=DeprecationWarning)
+            return (
+                self.textsize(
+                    "A",
+                    font=font,
+                    stroke_width=stroke_width,
+                )[1]
+                + spacing
+            )
+
+    def text(
+        self,
+        xy,
+        text,
+        fill=None,
+        font=None,
+        anchor=None,
+        spacing=4,
+        align="left",
+        direction=None,
+        features=None,
+        language=None,
+        stroke_width=0,
+        stroke_fill=None,
+        embedded_color=False,
+        *args,
+        **kwargs,
+    ):
+        """Draw text."""
+        if self._multiline_check(text):
+            return self.multiline_text(
+                xy,
+                text,
+                fill,
+                font,
+                anchor,
+                spacing,
+                align,
+                direction,
+                features,
+                language,
+                stroke_width,
+                stroke_fill,
+                embedded_color,
+            )
+
+        if embedded_color and self.mode not in ("RGB", "RGBA"):
+            raise ValueError("Embedded color supported only in RGB and RGBA modes")
+
+        if font is None:
+            font = self.getfont()
+
+        def getink(fill):
+            ink, fill = self._getink(fill)
+            if ink is None:
+                return fill
+            return ink
+
+        def draw_text(ink, stroke_width=0, stroke_offset=None):
+            mode = self.fontmode
+            if stroke_width == 0 and embedded_color:
+                mode = "RGBA"
+            coord = xy
+            try:
+                mask, offset = font.getmask2(
+                    text,
+                    mode,
+                    direction=direction,
+                    features=features,
+                    language=language,
+                    stroke_width=stroke_width,
+                    anchor=anchor,
+                    ink=ink,
+                    *args,
+                    **kwargs,
+                )
+                coord = coord[0] + offset[0], coord[1] + offset[1]
+            except AttributeError:
+                try:
+                    mask = font.getmask(
+                        text,
+                        mode,
+                        direction,
+                        features,
+                        language,
+                        stroke_width,
+                        anchor,
+                        ink,
+                        *args,
+                        **kwargs,
+                    )
+                except TypeError:
+                    mask = font.getmask(text)
+            if stroke_offset:
+                coord = coord[0] + stroke_offset[0], coord[1] + stroke_offset[1]
+            if mode == "RGBA":
+                # font.getmask2(mode="RGBA") returns color in RGB bands and mask in A
+                # extract mask and set text alpha
+                color, mask = mask, mask.getband(3)
+                color.fillband(3, (ink >> 24) & 0xFF)
+                x, y = (int(c) for c in coord)
+                self.im.paste(color, (x, y, x + mask.size[0], y + mask.size[1]), mask)
+            else:
+                self.draw.draw_bitmap(coord, mask, ink)
+
+        ink = getink(fill)
+        if ink is not None:
+            stroke_ink = None
+            if stroke_width:
+                stroke_ink = getink(stroke_fill) if stroke_fill is not None else ink
+
+            if stroke_ink is not None:
+                # Draw stroked text
+                draw_text(stroke_ink, stroke_width)
+
+                # Draw normal text
+                draw_text(ink, 0)
+            else:
+                # Only draw normal text
+                draw_text(ink)
+
+    def multiline_text(
+        self,
+        xy,
+ text, + fill=None, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + stroke_fill=None, + embedded_color=False, + ): + if direction == "ttb": + raise ValueError("ttb direction is unsupported for multiline text") + + if anchor is None: + anchor = "la" + elif len(anchor) != 2: + raise ValueError("anchor must be a 2 character string") + elif anchor[1] in "tb": + raise ValueError("anchor not supported for multiline text") + + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self._multiline_spacing(font, spacing, stroke_width) + for line in lines: + line_width = self.textlength( + line, font, direction=direction, features=features, language=language + ) + widths.append(line_width) + max_width = max(max_width, line_width) + + top = xy[1] + if anchor[1] == "m": + top -= (len(lines) - 1) * line_spacing / 2.0 + elif anchor[1] == "d": + top -= (len(lines) - 1) * line_spacing + + for idx, line in enumerate(lines): + left = xy[0] + width_difference = max_width - widths[idx] + + # first align left by anchor + if anchor[0] == "m": + left -= width_difference / 2.0 + elif anchor[0] == "r": + left -= width_difference + + # then align by align parameter + if align == "left": + pass + elif align == "center": + left += width_difference / 2.0 + elif align == "right": + left += width_difference + else: + raise ValueError('align must be "left", "center" or "right"') + + self.text( + (left, top), + line, + fill, + font, + anchor, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + stroke_fill=stroke_fill, + embedded_color=embedded_color, + ) + top += line_spacing + + def textsize( + self, + text, + font=None, + spacing=4, + direction=None, + features=None, + language=None, + stroke_width=0, + ): + """Get the size of a given string, in pixels.""" + deprecate("textsize", 10, "textbbox or textlength") + if self._multiline_check(text): + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + return self.multiline_textsize( + text, + font, + spacing, + direction, + features, + language, + stroke_width, + ) + + if font is None: + font = self.getfont() + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + return font.getsize( + text, + direction, + features, + language, + stroke_width, + ) + + def multiline_textsize( + self, + text, + font=None, + spacing=4, + direction=None, + features=None, + language=None, + stroke_width=0, + ): + deprecate("multiline_textsize", 10, "multiline_textbbox") + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self._multiline_spacing(font, spacing, stroke_width) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + for line in lines: + line_width, line_height = self.textsize( + line, + font, + spacing, + direction, + features, + language, + stroke_width, + ) + max_width = max(max_width, line_width) + return max_width, len(lines) * line_spacing - spacing + + def textlength( + self, + text, + font=None, + direction=None, + features=None, + language=None, + embedded_color=False, + ): + """Get the length of a given string, in pixels with 1/64 precision.""" + if self._multiline_check(text): + raise ValueError("can't measure length of multiline text") + if embedded_color and self.mode not in ("RGB", "RGBA"): + raise ValueError("Embedded color supported only in RGB and RGBA modes") + + 
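+        # Fall back to this instance's default font (created lazily by
+        # getfont()) when the caller does not pass one.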
if font is None: + font = self.getfont() + mode = "RGBA" if embedded_color else self.fontmode + try: + return font.getlength(text, mode, direction, features, language) + except AttributeError: + deprecate("textlength support for fonts without getlength", 10) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + size = self.textsize( + text, + font, + direction=direction, + features=features, + language=language, + ) + if direction == "ttb": + return size[1] + return size[0] + + def textbbox( + self, + xy, + text, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + embedded_color=False, + ): + """Get the bounding box of a given string, in pixels.""" + if embedded_color and self.mode not in ("RGB", "RGBA"): + raise ValueError("Embedded color supported only in RGB and RGBA modes") + + if self._multiline_check(text): + return self.multiline_textbbox( + xy, + text, + font, + anchor, + spacing, + align, + direction, + features, + language, + stroke_width, + embedded_color, + ) + + if font is None: + font = self.getfont() + mode = "RGBA" if embedded_color else self.fontmode + bbox = font.getbbox( + text, mode, direction, features, language, stroke_width, anchor + ) + return bbox[0] + xy[0], bbox[1] + xy[1], bbox[2] + xy[0], bbox[3] + xy[1] + + def multiline_textbbox( + self, + xy, + text, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + embedded_color=False, + ): + if direction == "ttb": + raise ValueError("ttb direction is unsupported for multiline text") + + if anchor is None: + anchor = "la" + elif len(anchor) != 2: + raise ValueError("anchor must be a 2 character string") + elif anchor[1] in "tb": + raise ValueError("anchor not supported for multiline text") + + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self._multiline_spacing(font, spacing, stroke_width) + for line in lines: + line_width = self.textlength( + line, + font, + direction=direction, + features=features, + language=language, + embedded_color=embedded_color, + ) + widths.append(line_width) + max_width = max(max_width, line_width) + + top = xy[1] + if anchor[1] == "m": + top -= (len(lines) - 1) * line_spacing / 2.0 + elif anchor[1] == "d": + top -= (len(lines) - 1) * line_spacing + + bbox = None + + for idx, line in enumerate(lines): + left = xy[0] + width_difference = max_width - widths[idx] + + # first align left by anchor + if anchor[0] == "m": + left -= width_difference / 2.0 + elif anchor[0] == "r": + left -= width_difference + + # then align by align parameter + if align == "left": + pass + elif align == "center": + left += width_difference / 2.0 + elif align == "right": + left += width_difference + else: + raise ValueError('align must be "left", "center" or "right"') + + bbox_line = self.textbbox( + (left, top), + line, + font, + anchor, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + embedded_color=embedded_color, + ) + if bbox is None: + bbox = bbox_line + else: + bbox = ( + min(bbox[0], bbox_line[0]), + min(bbox[1], bbox_line[1]), + max(bbox[2], bbox_line[2]), + max(bbox[3], bbox_line[3]), + ) + + top += line_spacing + + if bbox is None: + return xy[0], xy[1], xy[0], xy[1] + return bbox + + +def Draw(im, mode=None): + """ + A simple 2D drawing interface for PIL images. + + :param im: The image to draw in. 
+ :param mode: Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + try: + return im.getdraw(mode) + except AttributeError: + return ImageDraw(im, mode) + + +# experimental access to the outline API +try: + Outline = Image.core.outline +except AttributeError: + Outline = None + + +def getdraw(im=None, hints=None): + """ + (Experimental) A more advanced 2D drawing interface for PIL images, + based on the WCK interface. + + :param im: The image to draw in. + :param hints: An optional list of hints. + :returns: A (drawing context, drawing resource factory) tuple. + """ + # FIXME: this needs more work! + # FIXME: come up with a better 'hints' scheme. + handler = None + if not hints or "nicest" in hints: + try: + from . import _imagingagg as handler + except ImportError: + pass + if handler is None: + from . import ImageDraw2 as handler + if im: + im = handler.Draw(im) + return im, handler + + +def floodfill(image, xy, value, border=None, thresh=0): + """ + (experimental) Fills a bounded region with a given color. + + :param image: Target image. + :param xy: Seed position (a 2-item coordinate tuple). See + :ref:`coordinate-system`. + :param value: Fill color. + :param border: Optional border value. If given, the region consists of + pixels with a color different from the border color. If not given, + the region consists of pixels having the same color as the seed + pixel. + :param thresh: Optional threshold value which specifies a maximum + tolerable difference of a pixel value from the 'background' in + order for it to be replaced. Useful for filling regions of + non-homogeneous, but similar, colors. + """ + # based on an implementation by Eric S. Raymond + # amended by yo1995 @20180806 + pixel = image.load() + x, y = xy + try: + background = pixel[x, y] + if _color_diff(value, background) <= thresh: + return # seed point already has fill color + pixel[x, y] = value + except (ValueError, IndexError): + return # seed point outside image + edge = {(x, y)} + # use a set to keep record of current and previous edge pixels + # to reduce memory consumption + full_edge = set() + while edge: + new_edge = set() + for (x, y) in edge: # 4 adjacent method + for (s, t) in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)): + # If already processed, or if a coordinate is negative, skip + if (s, t) in full_edge or s < 0 or t < 0: + continue + try: + p = pixel[s, t] + except (ValueError, IndexError): + pass + else: + full_edge.add((s, t)) + if border is None: + fill = _color_diff(p, background) <= thresh + else: + fill = p != value and p != border + if fill: + pixel[s, t] = value + new_edge.add((s, t)) + full_edge = edge # discard pixels processed + edge = new_edge + + +def _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation): + """ + Generate a list of vertices for a 2D regular polygon. + + :param bounding_circle: The bounding circle is a tuple defined + by a point and radius. The polygon is inscribed in this circle. + (e.g. ``bounding_circle=(x, y, r)`` or ``((x, y), r)``) + :param n_sides: Number of sides + (e.g. ``n_sides=3`` for a triangle, ``6`` for a hexagon) + :param rotation: Apply an arbitrary rotation to the polygon + (e.g. ``rotation=90``, applies a 90 degree rotation) + :return: List of regular polygon vertices + (e.g. 
``[(25, 50), (50, 50), (50, 25), (25, 25)]``) + + How are the vertices computed? + 1. Compute the following variables + - theta: Angle between the apothem & the nearest polygon vertex + - side_length: Length of each polygon edge + - centroid: Center of bounding circle (1st, 2nd elements of bounding_circle) + - polygon_radius: Polygon radius (last element of bounding_circle) + - angles: Location of each polygon vertex in polar grid + (e.g. A square with 0 degree rotation => [225.0, 315.0, 45.0, 135.0]) + + 2. For each angle in angles, get the polygon vertex at that angle + The vertex is computed using the equation below. + X= xcos(φ) + ysin(φ) + Y= −xsin(φ) + ycos(φ) + + Note: + φ = angle in degrees + x = 0 + y = polygon_radius + + The formula above assumes rotation around the origin. + In our case, we are rotating around the centroid. + To account for this, we use the formula below + X = xcos(φ) + ysin(φ) + centroid_x + Y = −xsin(φ) + ycos(φ) + centroid_y + """ + # 1. Error Handling + # 1.1 Check `n_sides` has an appropriate value + if not isinstance(n_sides, int): + raise TypeError("n_sides should be an int") + if n_sides < 3: + raise ValueError("n_sides should be an int > 2") + + # 1.2 Check `bounding_circle` has an appropriate value + if not isinstance(bounding_circle, (list, tuple)): + raise TypeError("bounding_circle should be a tuple") + + if len(bounding_circle) == 3: + *centroid, polygon_radius = bounding_circle + elif len(bounding_circle) == 2: + centroid, polygon_radius = bounding_circle + else: + raise ValueError( + "bounding_circle should contain 2D coordinates " + "and a radius (e.g. (x, y, r) or ((x, y), r) )" + ) + + if not all(isinstance(i, (int, float)) for i in (*centroid, polygon_radius)): + raise ValueError("bounding_circle should only contain numeric data") + + if not len(centroid) == 2: + raise ValueError( + "bounding_circle centre should contain 2D coordinates (e.g. (x, y))" + ) + + if polygon_radius <= 0: + raise ValueError("bounding_circle radius should be > 0") + + # 1.3 Check `rotation` has an appropriate value + if not isinstance(rotation, (int, float)): + raise ValueError("rotation should be an int or float") + + # 2. Define Helper Functions + def _apply_rotation(point, degrees, centroid): + return ( + round( + point[0] * math.cos(math.radians(360 - degrees)) + - point[1] * math.sin(math.radians(360 - degrees)) + + centroid[0], + 2, + ), + round( + point[1] * math.cos(math.radians(360 - degrees)) + + point[0] * math.sin(math.radians(360 - degrees)) + + centroid[1], + 2, + ), + ) + + def _compute_polygon_vertex(centroid, polygon_radius, angle): + start_point = [polygon_radius, 0] + return _apply_rotation(start_point, angle, centroid) + + def _get_angles(n_sides, rotation): + angles = [] + degrees = 360 / n_sides + # Start with the bottom left polygon vertex + current_angle = (270 - 0.5 * degrees) + rotation + for _ in range(0, n_sides): + angles.append(current_angle) + current_angle += degrees + if current_angle > 360: + current_angle -= 360 + return angles + + # 3. Variable Declarations + angles = _get_angles(n_sides, rotation) + + # 4. Compute Vertices + return [ + _compute_polygon_vertex(centroid, polygon_radius, angle) for angle in angles + ] + + +def _color_diff(color1, color2): + """ + Uses 1-norm distance to calculate difference between two values. 
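+
+    For example, ``_color_diff((255, 0, 0), (250, 10, 0))`` is
+    ``|255 - 250| + |0 - 10| + |0 - 0| = 15``.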
+ """ + if isinstance(color2, tuple): + return sum(abs(color1[i] - color2[i]) for i in range(0, len(color2))) + else: + return abs(color1 - color2) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageDraw2.py b/sbsheriff/Lib/site-packages/PIL/ImageDraw2.py new file mode 100644 index 0000000..2667b77 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageDraw2.py @@ -0,0 +1,209 @@ +# +# The Python Imaging Library +# $Id$ +# +# WCK-style drawing interface operations +# +# History: +# 2003-12-07 fl created +# 2005-05-15 fl updated; added to PIL as ImageDraw2 +# 2005-05-15 fl added text support +# 2005-05-20 fl added arc/chord/pieslice support +# +# Copyright (c) 2003-2005 by Secret Labs AB +# Copyright (c) 2003-2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +""" +(Experimental) WCK-style drawing interface operations + +.. seealso:: :py:mod:`PIL.ImageDraw` +""" + + +import warnings + +from . import Image, ImageColor, ImageDraw, ImageFont, ImagePath +from ._deprecate import deprecate + + +class Pen: + """Stores an outline color and width.""" + + def __init__(self, color, width=1, opacity=255): + self.color = ImageColor.getrgb(color) + self.width = width + + +class Brush: + """Stores a fill color""" + + def __init__(self, color, opacity=255): + self.color = ImageColor.getrgb(color) + + +class Font: + """Stores a TrueType font and color""" + + def __init__(self, color, file, size=12): + # FIXME: add support for bitmap fonts + self.color = ImageColor.getrgb(color) + self.font = ImageFont.truetype(file, size) + + +class Draw: + """ + (Experimental) WCK-style drawing interface + """ + + def __init__(self, image, size=None, color=None): + if not hasattr(image, "im"): + image = Image.new(image, size, color) + self.draw = ImageDraw.Draw(image) + self.image = image + self.transform = None + + def flush(self): + return self.image + + def render(self, op, xy, pen, brush=None): + # handle color arguments + outline = fill = None + width = 1 + if isinstance(pen, Pen): + outline = pen.color + width = pen.width + elif isinstance(brush, Pen): + outline = brush.color + width = brush.width + if isinstance(brush, Brush): + fill = brush.color + elif isinstance(pen, Brush): + fill = pen.color + # handle transformation + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + # render the item + if op == "line": + self.draw.line(xy, fill=outline, width=width) + else: + getattr(self.draw, op)(xy, fill=fill, outline=outline) + + def settransform(self, offset): + """Sets a transformation offset.""" + (xoffset, yoffset) = offset + self.transform = (1, 0, xoffset, 0, 1, yoffset) + + def arc(self, xy, start, end, *options): + """ + Draws an arc (a portion of a circle outline) between the start and end + angles, inside the given bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.arc` + """ + self.render("arc", xy, start, end, *options) + + def chord(self, xy, start, end, *options): + """ + Same as :py:meth:`~PIL.ImageDraw2.Draw.arc`, but connects the end points + with a straight line. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.chord` + """ + self.render("chord", xy, start, end, *options) + + def ellipse(self, xy, *options): + """ + Draws an ellipse inside the given bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.ellipse` + """ + self.render("ellipse", xy, *options) + + def line(self, xy, *options): + """ + Draws a line between the coordinates in the ``xy`` list. + + .. 
seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.line` + """ + self.render("line", xy, *options) + + def pieslice(self, xy, start, end, *options): + """ + Same as arc, but also draws straight lines between the end points and the + center of the bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.pieslice` + """ + self.render("pieslice", xy, start, end, *options) + + def polygon(self, xy, *options): + """ + Draws a polygon. + + The polygon outline consists of straight lines between the given + coordinates, plus a straight line between the last and the first + coordinate. + + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.polygon` + """ + self.render("polygon", xy, *options) + + def rectangle(self, xy, *options): + """ + Draws a rectangle. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.rectangle` + """ + self.render("rectangle", xy, *options) + + def text(self, xy, text, font): + """ + Draws the string at the given position. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.text` + """ + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + self.draw.text(xy, text, font=font.font, fill=font.color) + + def textsize(self, text, font): + """ + .. deprecated:: 9.2.0 + + Return the size of the given string, in pixels. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textsize` + """ + deprecate("textsize", 10, "textbbox or textlength") + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + return self.draw.textsize(text, font=font.font) + + def textbbox(self, xy, text, font): + """ + Returns bounding box (in pixels) of given text. + + :return: ``(left, top, right, bottom)`` bounding box + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textbbox` + """ + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + return self.draw.textbbox(xy, text, font=font.font) + + def textlength(self, text, font): + """ + Returns length (in pixels) of given text. + This is the amount by which following text should be offset. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textlength` + """ + return self.draw.textlength(text, font=font.font) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageEnhance.py b/sbsheriff/Lib/site-packages/PIL/ImageEnhance.py new file mode 100644 index 0000000..3b79d5c --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageEnhance.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image enhancement classes +# +# For a background, see "Image Processing By Interpolation and +# Extrapolation", Paul Haeberli and Douglas Voorhies. Available +# at http://www.graficaobscura.com/interp/index.html +# +# History: +# 1996-03-23 fl Created +# 2009-06-16 fl Fixed mean calculation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFilter, ImageStat + + +class _Enhance: + def enhance(self, factor): + """ + Returns an enhanced image. + + :param factor: A floating point value controlling the enhancement. + Factor 1.0 always returns a copy of the original image, + lower factors mean less color (brightness, contrast, + etc), and higher values more. There are no restrictions + on this value. + :rtype: :py:class:`~PIL.Image.Image` + """ + return Image.blend(self.degenerate, self.image, factor) + + +class Color(_Enhance): + """Adjust image color balance. 
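+
+    A hedged sketch of typical use (the input path is hypothetical)::
+
+        from PIL import Image, ImageEnhance
+
+        im = Image.open("photo.jpg")  # hypothetical input image
+        muted = ImageEnhance.Color(im).enhance(0.5)  # 0.0 = B&W, 1.0 = original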
+ + This class can be used to adjust the colour balance of an image, in + a manner similar to the controls on a colour TV set. An enhancement + factor of 0.0 gives a black and white image. A factor of 1.0 gives + the original image. + """ + + def __init__(self, image): + self.image = image + self.intermediate_mode = "L" + if "A" in image.getbands(): + self.intermediate_mode = "LA" + + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) + + +class Contrast(_Enhance): + """Adjust image contrast. + + This class can be used to control the contrast of an image, similar + to the contrast control on a TV set. An enhancement factor of 0.0 + gives a solid grey image. A factor of 1.0 gives the original image. + """ + + def __init__(self, image): + self.image = image + mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) + self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) + + +class Brightness(_Enhance): + """Adjust image brightness. + + This class can be used to control the brightness of an image. An + enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the + original image. + """ + + def __init__(self, image): + self.image = image + self.degenerate = Image.new(image.mode, image.size, 0) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) + + +class Sharpness(_Enhance): + """Adjust image sharpness. + + This class can be used to adjust the sharpness of an image. An + enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the + original image, and a factor of 2.0 gives a sharpened image. + """ + + def __init__(self, image): + self.image = image + self.degenerate = image.filter(ImageFilter.SMOOTH) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageFile.py b/sbsheriff/Lib/site-packages/PIL/ImageFile.py new file mode 100644 index 0000000..f281b9e --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageFile.py @@ -0,0 +1,754 @@ +# +# The Python Imaging Library. +# $Id$ +# +# base class for image file handlers +# +# history: +# 1995-09-09 fl Created +# 1996-03-11 fl Fixed load mechanism. +# 1996-04-15 fl Added pcx/xbm decoders. +# 1996-04-30 fl Added encoders. +# 1996-12-14 fl Added load helpers +# 1997-01-11 fl Use encode_to_file where possible +# 1997-08-27 fl Flush output in _save +# 1998-03-05 fl Use memory mapping for some modes +# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" +# 1999-05-31 fl Added image parser +# 2000-10-12 fl Set readonly flag on memory-mapped images +# 2002-03-20 fl Use better messages for common decoder errors +# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available +# 2003-10-30 fl Added StubImageFile class +# 2004-02-25 fl Made incremental parser more robust +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import io +import itertools +import struct +import sys + +from . import Image +from ._util import is_path + +MAXBLOCK = 65536 + +SAFEBLOCK = 1024 * 1024 + +LOAD_TRUNCATED_IMAGES = False +"""Whether or not to load truncated image files. 
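+When ``False`` (the default), loading a truncated file raises an
+:exc:`OSError`; when ``True``, whatever data was decoded is kept.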
User code may change this.""" + +ERRORS = { + -1: "image buffer overrun error", + -2: "decoding error", + -3: "unknown error", + -8: "bad configuration", + -9: "out of memory error", +} +""" +Dict of known error codes returned from :meth:`.PyDecoder.decode`, +:meth:`.PyEncoder.encode` :meth:`.PyEncoder.encode_to_pyfd` and +:meth:`.PyEncoder.encode_to_file`. +""" + + +# +# -------------------------------------------------------------------- +# Helpers + + +def raise_oserror(error): + try: + message = Image.core.getcodecstatus(error) + except AttributeError: + message = ERRORS.get(error) + if not message: + message = f"decoder error {error}" + raise OSError(message + " when reading image file") + + +def _tilesort(t): + # sort on offset + return t[2] + + +# +# -------------------------------------------------------------------- +# ImageFile base class + + +class ImageFile(Image.Image): + """Base class for image file format handlers.""" + + def __init__(self, fp=None, filename=None): + super().__init__() + + self._min_frame = 0 + + self.custom_mimetype = None + + self.tile = None + """ A list of tile descriptors, or ``None`` """ + + self.readonly = 1 # until we know better + + self.decoderconfig = () + self.decodermaxblock = MAXBLOCK + + if is_path(fp): + # filename + self.fp = open(fp, "rb") + self.filename = fp + self._exclusive_fp = True + else: + # stream + self.fp = fp + self.filename = filename + # can be overridden + self._exclusive_fp = None + + try: + try: + self._open() + except ( + IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error, + ) as v: + raise SyntaxError(v) from v + + if not self.mode or self.size[0] <= 0 or self.size[1] <= 0: + raise SyntaxError("not identified by this driver") + except BaseException: + # close the file only if we have opened it this constructor + if self._exclusive_fp: + self.fp.close() + raise + + def get_format_mimetype(self): + if self.custom_mimetype: + return self.custom_mimetype + if self.format is not None: + return Image.MIME.get(self.format.upper()) + + def verify(self): + """Check file integrity""" + + # raise exception if something's wrong. must be called + # directly after open, and closes file when finished. + if self._exclusive_fp: + self.fp.close() + self.fp = None + + def load(self): + """Load image data based on tile list""" + + if self.tile is None: + raise OSError("cannot load this image") + + pixel = Image.Image.load(self) + if not self.tile: + return pixel + + self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. 
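+        # (so mmap is only ever attempted on CPython, which lacks
+        # sys.pypy_version_info)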
+ use_mmap = use_mmap and not hasattr(sys, "pypy_version_info") + + readonly = 0 + + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: + # try memory mapping + decoder_name, extents, offset, args = self.tile[0] + if ( + decoder_name == "raw" + and len(args) >= 3 + and args[0] == self.mode + and args[0] in Image._MAPMODES + ): + try: + # use mmap, if possible + import mmap + + with open(self.filename) as fp: + self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ) + if offset + self.size[1] * args[1] > self.map.size(): + # buffer is not large enough + raise OSError + self.im = Image.core.map_buffer( + self.map, self.size, decoder_name, offset, args + ) + readonly = 1 + # After trashing self.im, + # we might need to reload the palette data. + if self.palette: + self.palette.dirty = 1 + except (AttributeError, OSError, ImportError): + self.map = None + + self.load_prepare() + err_code = -3 # initialize to unknown error + if not self.map: + # sort tiles in file order + self.tile.sort(key=_tilesort) + + try: + # FIXME: This is a hack to handle TIFF's JpegTables tag. + prefix = self.tile_prefix + except AttributeError: + prefix = b"" + + # Remove consecutive duplicates that only differ by their offset + self.tile = [ + list(tiles)[-1] + for _, tiles in itertools.groupby( + self.tile, lambda tile: (tile[0], tile[1], tile[3]) + ) + ] + for decoder_name, extents, offset, args in self.tile: + seek(offset) + decoder = Image._getdecoder( + self.mode, decoder_name, args, self.decoderconfig + ) + try: + decoder.setimage(self.im, extents) + if decoder.pulls_fd: + decoder.setfd(self.fp) + err_code = decoder.decode(b"")[1] + else: + b = prefix + while True: + try: + s = read(self.decodermaxblock) + except (IndexError, struct.error) as e: + # truncated png/gif + if LOAD_TRUNCATED_IMAGES: + break + else: + raise OSError("image file is truncated") from e + + if not s: # truncated jpeg + if LOAD_TRUNCATED_IMAGES: + break + else: + raise OSError( + "image file is truncated " + f"({len(b)} bytes not processed)" + ) + + b = b + s + n, err_code = decoder.decode(b) + if n < 0: + break + b = b[n:] + finally: + # Need to cleanup here to prevent leaks + decoder.cleanup() + + self.tile = [] + self.readonly = readonly + + self.load_end() + + if self._exclusive_fp and self._close_exclusive_fp_after_loading: + self.fp.close() + self.fp = None + + if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0: + # still raised if decoder fails to return anything + raise_oserror(err_code) + + return Image.Image.load(self) + + def load_prepare(self): + # create image memory if necessary + if not self.im or self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.new(self.mode, self.size) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def load_end(self): + # may be overridden + pass + + # may be defined for contained formats + # def load_seek(self, pos): + # pass + + # may be defined for blocked formats (e.g. 
PNG) + # def load_read(self, bytes): + # pass + + def _seek_check(self, frame): + if ( + frame < self._min_frame + # Only check upper limit on frames if additional seek operations + # are not required to do so + or ( + not (hasattr(self, "_n_frames") and self._n_frames is None) + and frame >= self.n_frames + self._min_frame + ) + ): + raise EOFError("attempt to seek outside sequence") + + return self.tell() != frame + + +class StubImageFile(ImageFile): + """ + Base class for stub image loaders. + + A stub loader is an image loader that can identify files of a + certain format, but relies on external code to load the file. + """ + + def _open(self): + raise NotImplementedError("StubImageFile subclass must implement _open") + + def load(self): + loader = self._load() + if loader is None: + raise OSError(f"cannot find loader for this {self.format} file") + image = loader.load(self) + assert image is not None + # become the other object (!) + self.__class__ = image.__class__ + self.__dict__ = image.__dict__ + return image.load() + + def _load(self): + """(Hook) Find actual image loader.""" + raise NotImplementedError("StubImageFile subclass must implement _load") + + +class Parser: + """ + Incremental image parser. This class implements the standard + feed/close consumer interface. + """ + + incremental = None + image = None + data = None + decoder = None + offset = 0 + finished = 0 + + def reset(self): + """ + (Consumer) Reset the parser. Note that you can only call this + method immediately after you've created a parser; parser + instances cannot be reused. + """ + assert self.data is None, "cannot reuse parsers" + + def feed(self, data): + """ + (Consumer) Feed data to the parser. + + :param data: A string buffer. + :exception OSError: If the parser failed to parse the image file. + """ + # collect data + + if self.finished: + return + + if self.data is None: + self.data = data + else: + self.data = self.data + data + + # parse what we have + if self.decoder: + + if self.offset > 0: + # skip header + skip = min(len(self.data), self.offset) + self.data = self.data[skip:] + self.offset = self.offset - skip + if self.offset > 0 or not self.data: + return + + n, e = self.decoder.decode(self.data) + + if n < 0: + # end of stream + self.data = None + self.finished = 1 + if e < 0: + # decoding error + self.image = None + raise_oserror(e) + else: + # end of image + return + self.data = self.data[n:] + + elif self.image: + + # if we end up here with no decoder, this file cannot + # be incrementally parsed. wait until we've gotten all + # available data + pass + + else: + + # attempt to open this file + try: + with io.BytesIO(self.data) as fp: + im = Image.open(fp) + except OSError: + # traceback.print_exc() + pass # not enough data + else: + flag = hasattr(im, "load_seek") or hasattr(im, "load_read") + if flag or len(im.tile) != 1: + # custom load code, or multiple tiles + self.decode = None + else: + # initialize decoder + im.load_prepare() + d, e, o, a = im.tile[0] + im.tile = [] + self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig) + self.decoder.setimage(im.im, e) + + # calculate decoder offset + self.offset = o + if self.offset <= len(self.data): + self.data = self.data[self.offset :] + self.offset = 0 + + self.image = im + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + """ + (Consumer) Close the stream. + + :returns: An image object. 
+        :exception OSError: If the parser failed to parse the image file either
+                            because it cannot be identified or cannot be
+                            decoded.
+        """
+        # finish decoding
+        if self.decoder:
+            # get rid of what's left in the buffers
+            self.feed(b"")
+            self.data = self.decoder = None
+            if not self.finished:
+                raise OSError("image was incomplete")
+        if not self.image:
+            raise OSError("cannot parse this image")
+        if self.data:
+            # incremental parsing not possible; reopen the file
+            # now that we have all data
+            with io.BytesIO(self.data) as fp:
+                try:
+                    self.image = Image.open(fp)
+                finally:
+                    self.image.load()
+        return self.image
+
+
+# --------------------------------------------------------------------
+
+
+def _save(im, fp, tile, bufsize=0):
+    """Helper to save image based on tile list
+
+    :param im: Image object.
+    :param fp: File object.
+    :param tile: Tile list.
+    :param bufsize: Optional buffer size
+    """
+
+    im.load()
+    if not hasattr(im, "encoderconfig"):
+        im.encoderconfig = ()
+    tile.sort(key=_tilesort)
+    # FIXME: make MAXBLOCK a configuration parameter
+    # It would be great if we could have the encoder specify what it needs
+    # But, it would need at least the image size in most cases. RawEncode is
+    # a tricky case.
+    bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4)  # see RawEncode.c
+    try:
+        fh = fp.fileno()
+        fp.flush()
+        _encode_tile(im, fp, tile, bufsize, fh)
+    except (AttributeError, io.UnsupportedOperation) as exc:
+        _encode_tile(im, fp, tile, bufsize, None, exc)
+    if hasattr(fp, "flush"):
+        fp.flush()
+
+
+def _encode_tile(im, fp, tile, bufsize, fh, exc=None):
+    for e, b, o, a in tile:
+        if o > 0:
+            fp.seek(o)
+        encoder = Image._getencoder(im.mode, e, a, im.encoderconfig)
+        try:
+            encoder.setimage(im.im, b)
+            if encoder.pushes_fd:
+                encoder.setfd(fp)
+                l, s = encoder.encode_to_pyfd()
+            else:
+                if exc:
+                    # compress to Python file-compatible object
+                    while True:
+                        l, s, d = encoder.encode(bufsize)
+                        fp.write(d)
+                        if s:
+                            break
+                else:
+                    # slight speedup: compress to real file object
+                    s = encoder.encode_to_file(fh, bufsize)
+            if s < 0:
+                raise OSError(f"encoder error {s} when writing image file") from exc
+        finally:
+            encoder.cleanup()
+
+
+def _safe_read(fp, size):
+    """
+    Reads large blocks in a safe way. Unlike fp.read(n), this function
+    doesn't trust the user. If the requested size is larger than
+    SAFEBLOCK, the file is read block by block.
+
+    :param fp: File handle. Must implement a read method.
+    :param size: Number of bytes to read.
+    :returns: A string containing size bytes of data.
+
+    Raises an OSError if the file is truncated and the read cannot be completed
+
+    """
+    if size <= 0:
+        return b""
+    if size <= SAFEBLOCK:
+        data = fp.read(size)
+        if len(data) < size:
+            raise OSError("Truncated File Read")
+        return data
+    data = []
+    remaining_size = size
+    while remaining_size > 0:
+        block = fp.read(min(remaining_size, SAFEBLOCK))
+        if not block:
+            break
+        data.append(block)
+        remaining_size -= len(block)
+    if sum(len(d) for d in data) < size:
+        raise OSError("Truncated File Read")
+    return b"".join(data)
+
+
+class PyCodecState:
+    def __init__(self):
+        self.xsize = 0
+        self.ysize = 0
+        self.xoff = 0
+        self.yoff = 0
+
+    def extents(self):
+        return self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize
+
+
+class PyCodec:
+    def __init__(self, mode, *args):
+        self.im = None
+        self.state = PyCodecState()
+        self.fd = None
+        self.mode = mode
+        self.init(args)
+
+    def init(self, args):
+        """
+        Override to perform codec specific initialization
+
+        :param args: Array of args items from the tile entry
+        :returns: None
+        """
+        self.args = args
+
+    def cleanup(self):
+        """
+        Override to perform codec specific cleanup
+
+        :returns: None
+        """
+        pass
+
+    def setfd(self, fd):
+        """
+        Called from ImageFile to set the Python file-like object
+
+        :param fd: A Python file-like object
+        :returns: None
+        """
+        self.fd = fd
+
+    def setimage(self, im, extents=None):
+        """
+        Called from ImageFile to set the core output image for the codec
+
+        :param im: A core image object
+        :param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle
+            for this tile
+        :returns: None
+        """
+
+        # following c code
+        self.im = im
+
+        if extents:
+            (x0, y0, x1, y1) = extents
+        else:
+            (x0, y0, x1, y1) = (0, 0, 0, 0)
+
+        if x0 == 0 and x1 == 0:
+            self.state.xsize, self.state.ysize = self.im.size
+        else:
+            self.state.xoff = x0
+            self.state.yoff = y0
+            self.state.xsize = x1 - x0
+            self.state.ysize = y1 - y0
+
+        if self.state.xsize <= 0 or self.state.ysize <= 0:
+            raise ValueError("Size cannot be negative")
+
+        if (
+            self.state.xsize + self.state.xoff > self.im.size[0]
+            or self.state.ysize + self.state.yoff > self.im.size[1]
+        ):
+            raise ValueError("Tile cannot extend outside image")
+
+
+class PyDecoder(PyCodec):
+    """
+    Python implementation of a format decoder. Override this class and
+    add the decoding logic in the :meth:`decode` method.
+
+    See :ref:`Writing Your Own File Codec in Python`
+    """
+
+    _pulls_fd = False
+
+    @property
+    def pulls_fd(self):
+        return self._pulls_fd
+
+    def decode(self, buffer):
+        """
+        Override to perform the decoding process.
+
+        :param buffer: A bytes object with the data to be decoded.
+        :returns: A tuple of ``(bytes consumed, errcode)``.
+            If finished with decoding return -1 for the bytes consumed.
+            Err codes are from :data:`.ImageFile.ERRORS`.
+        """
+        raise NotImplementedError()
+
+    def set_as_raw(self, data, rawmode=None):
+        """
+        Convenience method to set the internal image from a stream of raw data
+
+        :param data: Bytes to be set
+        :param rawmode: The rawmode to be used for the decoder.
+            If not specified, it will default to the mode of the image
+        :returns: None
+        """
+
+        if not rawmode:
+            rawmode = self.mode
+        d = Image._getdecoder(self.mode, "raw", rawmode)
+        d.setimage(self.im, self.state.extents())
+        s = d.decode(data)
+
+        if s[0] >= 0:
+            raise ValueError("not enough image data")
+        if s[1] != 0:
+            raise ValueError("cannot decode image data")
+
+
+class PyEncoder(PyCodec):
+    """
+    Python implementation of a format encoder. Override this class and
+    add the encoding logic in the :meth:`encode` method.
+
+    See :ref:`Writing Your Own File Codec in Python`
+    """
+
+    _pushes_fd = False
+
+    @property
+    def pushes_fd(self):
+        return self._pushes_fd
+
+    def encode(self, bufsize):
+        """
+        Override to perform the encoding process.
+
+        :param bufsize: Buffer size.
+        :returns: A tuple of ``(bytes encoded, errcode, bytes)``.
+            If finished with encoding return 1 for the error code.
+            Err codes are from :data:`.ImageFile.ERRORS`.
+        """
+        raise NotImplementedError()
+
+    def encode_to_pyfd(self):
+        """
+        If ``pushes_fd`` is ``True``, then this method will be used,
+        and ``encode()`` will only be called once.
+
+        :returns: A tuple of ``(bytes consumed, errcode)``.
+            Err codes are from :data:`.ImageFile.ERRORS`.
+        """
+        if not self.pushes_fd:
+            return 0, -8  # bad configuration
+        bytes_consumed, errcode, data = self.encode(0)
+        if data:
+            self.fd.write(data)
+        return bytes_consumed, errcode
+
+    def encode_to_file(self, fh, bufsize):
+        """
+        :param fh: File handle.
+        :param bufsize: Buffer size.
+
+        :returns: If finished successfully, return 0.
+            Otherwise, return an error code. Err codes are from
+            :data:`.ImageFile.ERRORS`.
+        """
+        errcode = 0
+        while errcode == 0:
+            status, errcode, buf = self.encode(bufsize)
+            if status > 0:
+                fh.write(buf[:status])
+        return errcode
diff --git a/sbsheriff/Lib/site-packages/PIL/ImageFilter.py b/sbsheriff/Lib/site-packages/PIL/ImageFilter.py
new file mode 100644
index 0000000..e10c6fd
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/ImageFilter.py
@@ -0,0 +1,538 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard filters
+#
+# History:
+# 1995-11-27 fl Created
+# 2002-06-08 fl Added rank and mode filters
+# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2002 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+import functools
+
+
+class Filter:
+    pass
+
+
+class MultibandFilter(Filter):
+    pass
+
+
+class BuiltinFilter(MultibandFilter):
+    def filter(self, image):
+        if image.mode == "P":
+            raise ValueError("cannot filter palette images")
+        return image.filter(*self.filterargs)
+
+
+class Kernel(BuiltinFilter):
+    """
+    Create a convolution kernel. The current version only
+    supports 3x3 and 5x5 integer and floating point kernels.
+
+    In the current version, kernels can only be applied to
+    "L" and "RGB" images.
+
+    :param size: Kernel size, given as (width, height). In the current
+                 version, this must be (3,3) or (5,5).
+    :param kernel: A sequence containing kernel weights.
+    :param scale: Scale factor. If given, the result for each pixel is
+                  divided by this value. The default is the sum of the
+                  kernel weights.
+    :param offset: Offset. If given, this value is added to the result,
+                   after it has been divided by the scale factor.
+    """
+
+    name = "Kernel"
+
+    def __init__(self, size, kernel, scale=None, offset=0):
+        if scale is None:
+            # default scale is sum of kernel
+            scale = functools.reduce(lambda a, b: a + b, kernel)
+        if size[0] * size[1] != len(kernel):
+            raise ValueError("not enough coefficients in kernel")
+        self.filterargs = size, scale, offset, kernel
+
+
+class RankFilter(Filter):
+    """
+    Create a rank filter. The rank filter sorts all pixels in
+    a window of the given size, and returns the ``rank``'th value.
+
+    :param size: The kernel size, in pixels.
+    :param rank: What pixel value to pick.
Use 0 for a min filter, + ``size * size / 2`` for a median filter, ``size * size - 1`` + for a max filter, etc. + """ + + name = "Rank" + + def __init__(self, size, rank): + self.size = size + self.rank = rank + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + image = image.expand(self.size // 2, self.size // 2) + return image.rankfilter(self.size, self.rank) + + +class MedianFilter(RankFilter): + """ + Create a median filter. Picks the median pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Median" + + def __init__(self, size=3): + self.size = size + self.rank = size * size // 2 + + +class MinFilter(RankFilter): + """ + Create a min filter. Picks the lowest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Min" + + def __init__(self, size=3): + self.size = size + self.rank = 0 + + +class MaxFilter(RankFilter): + """ + Create a max filter. Picks the largest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Max" + + def __init__(self, size=3): + self.size = size + self.rank = size * size - 1 + + +class ModeFilter(Filter): + """ + Create a mode filter. Picks the most frequent pixel value in a box with the + given size. Pixel values that occur only once or twice are ignored; if no + pixel value occurs more than twice, the original pixel value is preserved. + + :param size: The kernel size, in pixels. + """ + + name = "Mode" + + def __init__(self, size=3): + self.size = size + + def filter(self, image): + return image.modefilter(self.size) + + +class GaussianBlur(MultibandFilter): + """Blurs the image with a sequence of extended box filters, which + approximates a Gaussian kernel. For details on accuracy see + + + :param radius: Standard deviation of the Gaussian kernel. + """ + + name = "GaussianBlur" + + def __init__(self, radius=2): + self.radius = radius + + def filter(self, image): + return image.gaussian_blur(self.radius) + + +class BoxBlur(MultibandFilter): + """Blurs the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + """ + + name = "BoxBlur" + + def __init__(self, radius): + self.radius = radius + + def filter(self, image): + return image.box_blur(self.radius) + + +class UnsharpMask(MultibandFilter): + """Unsharp mask filter. + + See Wikipedia's entry on `digital unsharp masking`_ for an explanation of + the parameters. + + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + + .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + + """ # noqa: E501 + + name = "UnsharpMask" + + def __init__(self, radius=2, percent=150, threshold=3): + self.radius = radius + self.percent = percent + self.threshold = threshold + + def filter(self, image): + return image.unsharp_mask(self.radius, self.percent, self.threshold) + + +class BLUR(BuiltinFilter): + name = "Blur" + # fmt: off + filterargs = (5, 5), 16, 0, ( + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on + + +class CONTOUR(BuiltinFilter): + name = "Contour" + # fmt: off + filterargs = (3, 3), 1, 255, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1, + ) + # fmt: on + + +class DETAIL(BuiltinFilter): + name = "Detail" + # fmt: off + filterargs = (3, 3), 6, 0, ( + 0, -1, 0, + -1, 10, -1, + 0, -1, 0, + ) + # fmt: on + + +class EDGE_ENHANCE(BuiltinFilter): + name = "Edge-enhance" + # fmt: off + filterargs = (3, 3), 2, 0, ( + -1, -1, -1, + -1, 10, -1, + -1, -1, -1, + ) + # fmt: on + + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name = "Edge-enhance More" + # fmt: off + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 9, -1, + -1, -1, -1, + ) + # fmt: on + + +class EMBOSS(BuiltinFilter): + name = "Emboss" + # fmt: off + filterargs = (3, 3), 1, 128, ( + -1, 0, 0, + 0, 1, 0, + 0, 0, 0, + ) + # fmt: on + + +class FIND_EDGES(BuiltinFilter): + name = "Find Edges" + # fmt: off + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1, + ) + # fmt: on + + +class SHARPEN(BuiltinFilter): + name = "Sharpen" + # fmt: off + filterargs = (3, 3), 16, 0, ( + -2, -2, -2, + -2, 32, -2, + -2, -2, -2, + ) + # fmt: on + + +class SMOOTH(BuiltinFilter): + name = "Smooth" + # fmt: off + filterargs = (3, 3), 13, 0, ( + 1, 1, 1, + 1, 5, 1, + 1, 1, 1, + ) + # fmt: on + + +class SMOOTH_MORE(BuiltinFilter): + name = "Smooth More" + # fmt: off + filterargs = (5, 5), 100, 0, ( + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on + + +class Color3DLUT(MultibandFilter): + """Three-dimensional color lookup table. + + Transforms 3-channel pixels using the values of the channels as coordinates + in the 3D lookup table and interpolating the nearest elements. + + This method allows you to apply almost any color transformation + in constant time by using pre-calculated decimated tables. + + .. versionadded:: 5.2.0 + + :param size: Size of the table. One int or tuple of (int, int, int). + Minimal size in any dimension is 2, maximum is 65. + :param table: Flat lookup table. A list of ``channels * size**3`` + float elements or a list of ``size**3`` channels-sized + tuples with floats. Channels are changed first, + then first dimension, then second, then third. + Value 0.0 corresponds lowest value of output, 1.0 highest. + :param channels: Number of channels in the table. Could be 3 or 4. + Default is 3. + :param target_mode: A mode for the result image. Should have not less + than ``channels`` channels. Default is ``None``, + which means that mode wouldn't be changed. + """ + + name = "Color 3D LUT" + + def __init__(self, size, table, channels=3, target_mode=None, **kwargs): + if channels not in (3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + self.size = size = self._check_size(size) + self.channels = channels + self.mode = target_mode + + # Hidden flag `_copy_table=False` could be used to avoid extra copying + # of the table if the table is specially made for the constructor. 
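+        # Whichever form the table arrives in (a numpy array, a flat
+        # sequence, or a sequence of channels-sized tuples), the branches
+        # below normalize it to a flat sequence of ``items * channels``
+        # values before the final size check.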
+ copy_table = kwargs.get("_copy_table", True) + items = size[0] * size[1] * size[2] + wrong_size = False + + numpy = None + if hasattr(table, "shape"): + try: + import numpy + except ImportError: # pragma: no cover + pass + + if numpy and isinstance(table, numpy.ndarray): + if copy_table: + table = table.copy() + + if table.shape in [ + (items * channels,), + (items, channels), + (size[2], size[1], size[0], channels), + ]: + table = table.reshape(items * channels) + else: + wrong_size = True + + else: + if copy_table: + table = list(table) + + # Convert to a flat list + if table and isinstance(table[0], (list, tuple)): + table, raw_table = [], table + for pixel in raw_table: + if len(pixel) != channels: + raise ValueError( + "The elements of the table should " + "have a length of {}.".format(channels) + ) + table.extend(pixel) + + if wrong_size or len(table) != items * channels: + raise ValueError( + "The table should have either channels * size**3 float items " + "or size**3 items of channels-sized tuples with floats. " + f"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. " + f"Actual length: {len(table)}" + ) + self.table = table + + @staticmethod + def _check_size(size): + try: + _, _, _ = size + except ValueError as e: + raise ValueError( + "Size should be either an integer or a tuple of three integers." + ) from e + except TypeError: + size = (size, size, size) + size = [int(x) for x in size] + for size_1d in size: + if not 2 <= size_1d <= 65: + raise ValueError("Size should be in [2, 65] range.") + return size + + @classmethod + def generate(cls, size, callback, channels=3, target_mode=None): + """Generates new LUT using provided callback. + + :param size: Size of the table. Passed to the constructor. + :param callback: Function with three parameters which correspond + three color channels. Will be called ``size**3`` + times with values from 0.0 to 1.0 and should return + a tuple with ``channels`` elements. + :param channels: The number of channels which should return callback. + :param target_mode: Passed to the constructor of the resulting + lookup table. + """ + size_1d, size_2d, size_3d = cls._check_size(size) + if channels not in (3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + + table = [0] * (size_1d * size_2d * size_3d * channels) + idx_out = 0 + for b in range(size_3d): + for g in range(size_2d): + for r in range(size_1d): + table[idx_out : idx_out + channels] = callback( + r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1) + ) + idx_out += channels + + return cls( + (size_1d, size_2d, size_3d), + table, + channels=channels, + target_mode=target_mode, + _copy_table=False, + ) + + def transform(self, callback, with_normals=False, channels=None, target_mode=None): + """Transforms the table values using provided callback and returns + a new LUT with altered values. + + :param callback: A function which takes old lookup table values + and returns a new set of values. The number + of arguments which function should take is + ``self.channels`` or ``3 + self.channels`` + if ``with_normals`` flag is set. + Should return a tuple of ``self.channels`` or + ``channels`` elements if it is set. + :param with_normals: If true, ``callback`` will be called with + coordinates in the color cube as the first + three arguments. Otherwise, ``callback`` + will be called only with actual color values. + :param channels: The number of channels in the resulting lookup table. + :param target_mode: Passed to the constructor of the resulting + lookup table. 
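+
+        A hypothetical sketch, inverting the values of an existing
+        3-channel ``lut``::
+
+            inverted = lut.transform(lambda r, g, b: (1 - r, 1 - g, 1 - b))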
+ """ + if channels not in (None, 3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + ch_in = self.channels + ch_out = channels or ch_in + size_1d, size_2d, size_3d = self.size + + table = [0] * (size_1d * size_2d * size_3d * ch_out) + idx_in = 0 + idx_out = 0 + for b in range(size_3d): + for g in range(size_2d): + for r in range(size_1d): + values = self.table[idx_in : idx_in + ch_in] + if with_normals: + values = callback( + r / (size_1d - 1), + g / (size_2d - 1), + b / (size_3d - 1), + *values, + ) + else: + values = callback(*values) + table[idx_out : idx_out + ch_out] = values + idx_in += ch_in + idx_out += ch_out + + return type(self)( + self.size, + table, + channels=ch_out, + target_mode=target_mode or self.mode, + _copy_table=False, + ) + + def __repr__(self): + r = [ + f"{self.__class__.__name__} from {self.table.__class__.__name__}", + "size={:d}x{:d}x{:d}".format(*self.size), + f"channels={self.channels:d}", + ] + if self.mode: + r.append(f"target_mode={self.mode}") + return "<{}>".format(" ".join(r)) + + def filter(self, image): + from . import Image + + return image.color_lut_3d( + self.mode or image.mode, + Image.Resampling.BILINEAR, + self.channels, + self.size[0], + self.size[1], + self.size[2], + self.table, + ) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageFont.py b/sbsheriff/Lib/site-packages/PIL/ImageFont.py new file mode 100644 index 0000000..457e906 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageFont.py @@ -0,0 +1,1181 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIL raster font management +# +# History: +# 1996-08-07 fl created (experimental) +# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 +# 1999-02-06 fl rewrote most font management stuff in C +# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) +# 2001-02-17 fl added freetype support +# 2001-05-09 fl added TransposedFont wrapper class +# 2002-03-04 fl make sure we have a "L" or "1" font +# 2002-12-04 fl skip non-directory entries in the system path +# 2003-04-29 fl add embedded default font +# 2003-09-27 fl added support for truetype charmap encodings +# +# Todo: +# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import base64 +import os +import sys +import warnings +from enum import IntEnum +from io import BytesIO + +from . import Image +from ._deprecate import deprecate +from ._util import is_directory, is_path + + +class Layout(IntEnum): + BASIC = 0 + RAQM = 1 + + +def __getattr__(name): + for enum, prefix in {Layout: "LAYOUT_"}.items(): + if name.startswith(prefix): + name = name[len(prefix) :] + if name in enum.__members__: + deprecate(f"{prefix}{name}", 10, f"{enum.__name__}.{name}") + return enum[name] + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +class _ImagingFtNotInstalled: + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imagingft C module is not installed") + + +try: + from . import _imagingft as core +except ImportError: + core = _ImagingFtNotInstalled() + + +_UNSPECIFIED = object() + + +# FIXME: add support for pilfont2 format (see FontFile.py) + +# -------------------------------------------------------------------- +# Font metrics format: +# "PILfont" LF +# fontdescriptor LF +# (optional) key=value... 
LF +# "DATA" LF +# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) +# +# To place a character, cut out srcbox and paste at dstbox, +# relative to the character position. Then move the character +# position according to dx, dy. +# -------------------------------------------------------------------- + + +class ImageFont: + """PIL font wrapper""" + + def _load_pilfont(self, filename): + + with open(filename, "rb") as fp: + image = None + for ext in (".png", ".gif", ".pbm"): + if image: + image.close() + try: + fullname = os.path.splitext(filename)[0] + ext + image = Image.open(fullname) + except Exception: + pass + else: + if image and image.mode in ("1", "L"): + break + else: + if image: + image.close() + raise OSError("cannot find glyph data file") + + self.file = fullname + + self._load_pilfont_data(fp, image) + image.close() + + def _load_pilfont_data(self, file, image): + + # read PILfont header + if file.readline() != b"PILfont\n": + raise SyntaxError("Not a PILfont file") + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary + while True: + s = file.readline() + if not s or s == b"DATA\n": + break + self.info.append(s) + + # read PILfont metrics + data = file.read(256 * 20) + + # check image + if image.mode not in ("1", "L"): + raise TypeError("invalid font image mode") + + image.load() + + self.font = Image.core.font(image.im, data) + + def getsize(self, text, *args, **kwargs): + """ + .. deprecated:: 9.2.0 + + Use :py:meth:`.getbbox` or :py:meth:`.getlength` instead. + + See :ref:`deprecations ` for more information. + + Returns width and height (in pixels) of given text. + + :param text: Text to measure. + + :return: (width, height) + """ + deprecate("getsize", 10, "getbbox or getlength") + return self.font.getsize(text) + + def getmask(self, text, mode="", *args, **kwargs): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. + """ + return self.font.getmask(text, mode) + + def getbbox(self, text, *args, **kwargs): + """ + Returns bounding box (in pixels) of given text. + + .. versionadded:: 9.2.0 + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + :return: ``(left, top, right, bottom)`` bounding box + """ + width, height = self.font.getsize(text) + return 0, 0, width, height + + def getlength(self, text, *args, **kwargs): + """ + Returns length (in pixels) of given text. + This is the amount by which following text should be offset. + + .. versionadded:: 9.2.0 + """ + width, height = self.font.getsize(text) + return width + + +## +# Wrapper for FreeType fonts. Application code should use the +# truetype factory function to create font objects. 
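+# A minimal usage sketch of that pattern (the font filename below is
+# hypothetical; any TrueType or OpenType file findable on the system works):
+#
+#     from PIL import ImageFont
+#
+#     font = ImageFont.truetype("DejaVuSans.ttf", size=24)
+#     font.getname()         # family/style pair, e.g. ("DejaVu Sans", "Book")
+#     font.getbbox("Hello")  # (left, top, right, bottom) bounding box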
+ + +class FreeTypeFont: + """FreeType font wrapper (requires _imagingft service)""" + + def __init__(self, font=None, size=10, index=0, encoding="", layout_engine=None): + # FIXME: use service provider instead + + self.path = font + self.size = size + self.index = index + self.encoding = encoding + + if layout_engine not in (Layout.BASIC, Layout.RAQM): + layout_engine = Layout.BASIC + if core.HAVE_RAQM: + layout_engine = Layout.RAQM + elif layout_engine == Layout.RAQM and not core.HAVE_RAQM: + warnings.warn( + "Raqm layout was requested, but Raqm is not available. " + "Falling back to basic layout." + ) + layout_engine = Layout.BASIC + + self.layout_engine = layout_engine + + def load_from_bytes(f): + self.font_bytes = f.read() + self.font = core.getfont( + "", size, index, encoding, self.font_bytes, layout_engine + ) + + if is_path(font): + if sys.platform == "win32": + font_bytes_path = font if isinstance(font, bytes) else font.encode() + try: + font_bytes_path.decode("ascii") + except UnicodeDecodeError: + # FreeType cannot load fonts with non-ASCII characters on Windows + # So load it into memory first + with open(font, "rb") as f: + load_from_bytes(f) + return + self.font = core.getfont( + font, size, index, encoding, layout_engine=layout_engine + ) + else: + load_from_bytes(font) + + def __getstate__(self): + return [self.path, self.size, self.index, self.encoding, self.layout_engine] + + def __setstate__(self, state): + path, size, index, encoding, layout_engine = state + self.__init__(path, size, index, encoding, layout_engine) + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, str) else b"\n" + return text.split(split_character) + + def getname(self): + """ + :return: A tuple of the font family (e.g. Helvetica) and the font style + (e.g. Bold) + """ + return self.font.family, self.font.style + + def getmetrics(self): + """ + :return: A tuple of the font ascent (the distance from the baseline to + the highest outline point) and descent (the distance from the + baseline to the lowest outline point, a negative value) + """ + return self.font.ascent, self.font.descent + + def getlength(self, text, mode="", direction=None, features=None, language=None): + """ + Returns length (in pixels with 1/64 precision) of given text when rendered + in font with provided direction, features, and language. + + This is the amount by which following text should be offset. + Text bounding box may extend past the length in some fonts, + e.g. when using italics or accents. + + The result is returned as a float; it is a whole number if using basic layout. + + Note that the sum of two lengths may not equal the length of a concatenated + string due to kerning. If you need to adjust for kerning, include the following + character and subtract its length. + + For example, instead of + + .. code-block:: python + + hello = font.getlength("Hello") + world = font.getlength("World") + hello_world = hello + world # not adjusted for kerning + assert hello_world == font.getlength("HelloWorld") # may fail + + use + + .. code-block:: python + + hello = font.getlength("HelloW") - font.getlength("W") # adjusted for kerning + world = font.getlength("World") + hello_world = hello + world # adjusted for kerning + assert hello_world == font.getlength("HelloWorld") # True + + or disable kerning with (requires libraqm) + + .. 
code-block:: python + + hello = draw.textlength("Hello", font, features=["-kern"]) + world = draw.textlength("World", font, features=["-kern"]) + hello_world = hello + world # kerning is disabled, no need to adjust + assert hello_world == draw.textlength("HelloWorld", font, features=["-kern"]) + + .. versionadded:: 8.0.0 + + :param text: Text to measure. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + :return: Width for horizontal, height for vertical text. + """ + return self.font.getlength(text, mode, direction, features, language) / 64 + + def getbbox( + self, + text, + mode="", + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ): + """ + Returns bounding box (in pixels) of given text relative to given anchor + when rendered in font with provided direction, features, and language. + + Use :py:meth:`getlength()` to get the offset of following text with + 1/64 pixel precision. The bounding box includes extra margins for + some fonts, e.g. italics or accents. + + .. versionadded:: 8.0.0 + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + :param stroke_width: The width of the text stroke. + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left. 
+ See :ref:`text-anchors` for valid values. + + :return: ``(left, top, right, bottom)`` bounding box + """ + size, offset = self.font.getsize( + text, mode, direction, features, language, anchor + ) + left, top = offset[0] - stroke_width, offset[1] - stroke_width + width, height = size[0] + 2 * stroke_width, size[1] + 2 * stroke_width + return left, top, left + width, top + height + + def getsize( + self, + text, + direction=None, + features=None, + language=None, + stroke_width=0, + ): + """ + .. deprecated:: 9.2.0 + + Use :py:meth:`getlength()` to measure the offset of following text with + 1/64 pixel precision. + Use :py:meth:`getbbox()` to get the exact bounding box based on an anchor. + + See :ref:`deprecations ` for more information. + + Returns width and height (in pixels) of given text if rendered in font with + provided direction, features, and language. + + .. note:: For historical reasons this function measures text height from + the ascender line instead of the top, see :ref:`text-anchors`. + If you wish to measure text height from the top, it is recommended + to use the bottom value of :meth:`getbbox` with ``anchor='lt'`` instead. + + :param text: Text to measure. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :return: (width, height) + """ + deprecate("getsize", 10, "getbbox or getlength") + # vertical offset is added for historical reasons + # see https://github.com/python-pillow/Pillow/pull/4910#discussion_r486682929 + size, offset = self.font.getsize(text, "L", direction, features, language) + return ( + size[0] + stroke_width * 2, + size[1] + stroke_width * 2 + offset[1], + ) + + def getsize_multiline( + self, + text, + direction=None, + spacing=4, + features=None, + language=None, + stroke_width=0, + ): + """ + .. deprecated:: 9.2.0 + + Use :py:meth:`.ImageDraw.multiline_textbbox` instead. + + See :ref:`deprecations ` for more information. + + Returns width and height (in pixels) of given text if rendered in font + with provided direction, features, and language, while respecting + newline characters. + + :param text: Text to measure. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param spacing: The vertical gap between lines, defaulting to 4 pixels. + + :param features: A list of OpenType font features to be used during text + layout. 
This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :return: (width, height) + """ + deprecate("getsize_multiline", 10, "ImageDraw.multiline_textbbox") + max_width = 0 + lines = self._multiline_split(text) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + line_spacing = self.getsize("A", stroke_width=stroke_width)[1] + spacing + for line in lines: + line_width, line_height = self.getsize( + line, direction, features, language, stroke_width + ) + max_width = max(max_width, line_width) + + return max_width, len(lines) * line_spacing - spacing + + def getoffset(self, text): + """ + .. deprecated:: 9.2.0 + + Use :py:meth:`.getbbox` instead. + + See :ref:`deprecations ` for more information. + + Returns the offset of given text. This is the gap between the + starting coordinate and the first marking. Note that this gap is + included in the result of :py:func:`~PIL.ImageFont.FreeTypeFont.getsize`. + + :param text: Text to measure. + + :return: A tuple of the x and y offset + """ + deprecate("getoffset", 10, "getbbox") + return self.font.getsize(text)[1] + + def getmask( + self, + text, + mode="", + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ink=0, + ): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. If the font has embedded color data, the bitmap + should have mode ``RGBA``. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. 
versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left. + See :ref:`text-anchors` for valid values. + + .. versionadded:: 8.0.0 + + :param ink: Foreground ink for rendering in RGBA mode. + + .. versionadded:: 8.0.0 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. + """ + return self.getmask2( + text, + mode, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + anchor=anchor, + ink=ink, + )[0] + + def getmask2( + self, + text, + mode="", + fill=_UNSPECIFIED, + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ink=0, + *args, + **kwargs, + ): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. If the font has embedded color data, the bitmap + should have mode ``RGBA``. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :param fill: Optional fill function. By default, an internal Pillow function + will be used. + + Deprecated. This parameter will be removed in Pillow 10 + (2023-07-01). + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left. + See :ref:`text-anchors` for valid values. + + .. versionadded:: 8.0.0 + + :param ink: Foreground ink for rendering in RGBA mode. + + .. 
versionadded:: 8.0.0 + + :return: A tuple of an internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module, and the text offset, the + gap between the starting coordinate and the first marking + """ + if fill is _UNSPECIFIED: + fill = Image.core.fill + else: + deprecate("fill", 10) + size, offset = self.font.getsize( + text, mode, direction, features, language, anchor + ) + size = size[0] + stroke_width * 2, size[1] + stroke_width * 2 + offset = offset[0] - stroke_width, offset[1] - stroke_width + Image._decompression_bomb_check(size) + im = fill("RGBA" if mode == "RGBA" else "L", size, 0) + self.font.render( + text, im.id, mode, direction, features, language, stroke_width, ink + ) + return im, offset + + def font_variant( + self, font=None, size=None, index=None, encoding=None, layout_engine=None + ): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. + """ + if font is None: + try: + font = BytesIO(self.font_bytes) + except AttributeError: + font = self.path + return FreeTypeFont( + font=font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else encoding, + layout_engine=layout_engine or self.layout_engine, + ) + + def get_variation_names(self): + """ + :returns: A list of the named styles in a variation font. + :exception OSError: If the font is not a variation font. + """ + try: + names = self.font.getvarnames() + except AttributeError as e: + raise NotImplementedError("FreeType 2.9.1 or greater is required") from e + return [name.replace(b"\x00", b"") for name in names] + + def set_variation_by_name(self, name): + """ + :param name: The name of the style. + :exception OSError: If the font is not a variation font. + """ + names = self.get_variation_names() + if not isinstance(name, bytes): + name = name.encode() + index = names.index(name) + 1 + + if index == getattr(self, "_last_variation_index", None): + # When the same name is set twice in a row, + # there is an 'unknown freetype error' + # https://savannah.nongnu.org/bugs/?56186 + return + self._last_variation_index = index + + self.font.setvarname(index) + + def get_variation_axes(self): + """ + :returns: A list of the axes in a variation font. + :exception OSError: If the font is not a variation font. + """ + try: + axes = self.font.getvaraxes() + except AttributeError as e: + raise NotImplementedError("FreeType 2.9.1 or greater is required") from e + for axis in axes: + axis["name"] = axis["name"].replace(b"\x00", b"") + return axes + + def set_variation_by_axes(self, axes): + """ + :param axes: A list of values for each axis. + :exception OSError: If the font is not a variation font. + """ + try: + self.font.setvaraxes(axes) + except AttributeError as e: + raise NotImplementedError("FreeType 2.9.1 or greater is required") from e + + +class TransposedFont: + """Wrapper for writing rotated or mirrored text""" + + def __init__(self, font, orientation=None): + """ + Wrapper that creates a transposed font from any existing font + object. + + :param font: A font object. + :param orientation: An optional orientation. If given, this should + be one of Image.Transpose.FLIP_LEFT_RIGHT, Image.Transpose.FLIP_TOP_BOTTOM, + Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_180, or + Image.Transpose.ROTATE_270. 
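+
+            Note that with ROTATE_90 or ROTATE_270 the reported width and
+            height are swapped, and getlength() raises ValueError, since
+            horizontal length is undefined for vertically rendered text.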
+ """ + self.font = font + self.orientation = orientation # any 'transpose' argument, or None + + def getsize(self, text, *args, **kwargs): + """ + .. deprecated:: 9.2.0 + + Use :py:meth:`.getbbox` or :py:meth:`.getlength` instead. + + See :ref:`deprecations ` for more information. + """ + deprecate("getsize", 10, "getbbox or getlength") + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + w, h = self.font.getsize(text) + if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): + return h, w + return w, h + + def getmask(self, text, mode="", *args, **kwargs): + im = self.font.getmask(text, mode, *args, **kwargs) + if self.orientation is not None: + return im.transpose(self.orientation) + return im + + def getbbox(self, text, *args, **kwargs): + # TransposedFont doesn't support getmask2, move top-left point to (0, 0) + # this has no effect on ImageFont and simulates anchor="lt" for FreeTypeFont + left, top, right, bottom = self.font.getbbox(text, *args, **kwargs) + width = right - left + height = bottom - top + if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): + return 0, 0, height, width + return 0, 0, width, height + + def getlength(self, text, *args, **kwargs): + if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): + raise ValueError( + "text length is undefined for text rotated by 90 or 270 degrees" + ) + return self.font.getlength(text, *args, **kwargs) + + +def load(filename): + """ + Load a font file. This function loads a font object from the given + bitmap font file, and returns the corresponding font object. + + :param filename: Name of font file. + :return: A font object. + :exception OSError: If the file could not be read. + """ + f = ImageFont() + f._load_pilfont(filename) + return f + + +def truetype(font=None, size=10, index=0, encoding="", layout_engine=None): + """ + Load a TrueType or OpenType font from a file or file-like object, + and create a font object. + This function loads a font object from the given file or file-like + object, and creates a font object for a font of the given size. + + Pillow uses FreeType to open font files. On Windows, be aware that FreeType + will keep the file open as long as the FreeTypeFont object exists. Windows + limits the number of files that can be open in C at once to 512, so if many + fonts are opened simultaneously and that limit is approached, an + ``OSError`` may be thrown, reporting that FreeType "cannot open resource". + A workaround would be to copy the file(s) into memory, and open that instead. + + This function requires the _imagingft service. + + :param font: A filename or file-like object containing a TrueType font. + If the file is not found in this filename, the loader may also + search in other directories, such as the :file:`fonts/` + directory on Windows or :file:`/Library/Fonts/`, + :file:`/System/Library/Fonts/` and :file:`~/Library/Fonts/` on + macOS. + + :param size: The requested size, in pixels. + :param index: Which font face to load (default is first available face). + :param encoding: Which font encoding to use (default is Unicode). 
Possible + encodings include (see the FreeType documentation for more + information): + + * "unic" (Unicode) + * "symb" (Microsoft Symbol) + * "ADOB" (Adobe Standard) + * "ADBE" (Adobe Expert) + * "ADBC" (Adobe Custom) + * "armn" (Apple Roman) + * "sjis" (Shift JIS) + * "gb " (PRC) + * "big5" + * "wans" (Extended Wansung) + * "joha" (Johab) + * "lat1" (Latin-1) + + This specifies the character set to use. It does not alter the + encoding of any text provided in subsequent operations. + :param layout_engine: Which layout engine to use, if available: + :data:`.ImageFont.Layout.BASIC` or :data:`.ImageFont.Layout.RAQM`. + If it is available, Raqm layout will be used by default. + Otherwise, basic layout will be used. + + Raqm layout is recommended for all non-English text. If Raqm layout + is not required, basic layout will have better performance. + + You can check support for Raqm layout using + :py:func:`PIL.features.check_feature` with ``feature="raqm"``. + + .. versionadded:: 4.2.0 + :return: A font object. + :exception OSError: If the file could not be read. + """ + + def freetype(font): + return FreeTypeFont(font, size, index, encoding, layout_engine) + + try: + return freetype(font) + except OSError: + if not is_path(font): + raise + ttf_filename = os.path.basename(font) + + dirs = [] + if sys.platform == "win32": + # check the windows font repository + # NOTE: must use uppercase WINDIR, to work around bugs in + # 1.5.2's os.environ.get() + windir = os.environ.get("WINDIR") + if windir: + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ("linux", "linux2"): + lindirs = os.environ.get("XDG_DATA_DIRS", "") + if not lindirs: + # According to the freedesktop spec, XDG_DATA_DIRS should + # default to /usr/share + lindirs = "/usr/share" + dirs += [os.path.join(lindir, "fonts") for lindir in lindirs.split(":")] + elif sys.platform == "darwin": + dirs += [ + "/Library/Fonts", + "/System/Library/Fonts", + os.path.expanduser("~/Library/Fonts"), + ] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + return freetype(os.path.join(walkroot, walkfilename)) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == ".ttf": + return freetype(fontpath) + if not ext and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return freetype(first_font_with_a_different_extension) + raise + + +def load_path(filename): + """ + Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a + bitmap font along the Python path. + + :param filename: Name of font file. + :return: A font object. + :exception OSError: If the file could not be read. + """ + for directory in sys.path: + if is_directory(directory): + if not isinstance(filename, str): + filename = filename.decode("utf-8") + try: + return load(os.path.join(directory, filename)) + except OSError: + pass + raise OSError("cannot find font file") + + +def load_default(): + """Load a "better than nothing" default font. + + .. versionadded:: 1.1.4 + + :return: A font object. 
+ """ + f = ImageFont() + f._load_pilfont_data( + # courB08 + BytesIO( + base64.b64decode( + b""" +UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA +BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL +AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA +AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB +ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A +BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB +//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA +AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH +AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA +ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv +AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ +/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 +AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA +AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG +AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA +BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA +AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA +2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF +AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// ++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA +////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA +BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv +AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA +AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA +AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB +mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 +AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA 
+Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC +eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA +LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 +AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA +AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 +AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA +AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG +AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA +EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK +AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA +pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG 
+AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// ++QAGAAIAzgAKANUAEw== +""" + ) + ), + Image.open( + BytesIO( + base64.b64decode( + b""" +iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u +Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 +M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g +LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F +IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA +Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 +NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx +in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 +SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY +AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt +y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG +ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY +lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H +/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 +AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 +c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ +/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw +pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv +oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR +evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA +AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// +Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR +w7IkEbzhVQAAAABJRU5ErkJggg== +""" + ) + ) + ), + ) + return f diff --git a/sbsheriff/Lib/site-packages/PIL/ImageGrab.py b/sbsheriff/Lib/site-packages/PIL/ImageGrab.py new file mode 100644 index 0000000..38074cb --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageGrab.py @@ -0,0 +1,135 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber +# +# History: +# 2001-04-26 fl created +# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import os +import shutil +import subprocess +import sys +import tempfile + +from . 
import Image + + +def grab(bbox=None, include_layered_windows=False, all_screens=False, xdisplay=None): + if xdisplay is None: + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp(".png") + os.close(fh) + args = ["screencapture"] + if bbox: + left, top, right, bottom = bbox + args += ["-R", f"{left},{top},{right-left},{bottom-top}"] + subprocess.call(args + ["-x", filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + if bbox: + im_resized = im.resize((right - left, bottom - top)) + im.close() + return im_resized + return im + elif sys.platform == "win32": + offset, size, data = Image.core.grabscreen_win32( + include_layered_windows, all_screens + ) + im = Image.frombytes( + "RGB", + size, + data, + # RGB, 32-bit line padding, origin lower left corner + "raw", + "BGR", + (size[0] * 3 + 3) & -4, + -1, + ) + if bbox: + x0, y0 = offset + left, top, right, bottom = bbox + im = im.crop((left - x0, top - y0, right - x0, bottom - y0)) + return im + elif shutil.which("gnome-screenshot"): + fh, filepath = tempfile.mkstemp(".png") + os.close(fh) + subprocess.call(["gnome-screenshot", "-f", filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + if bbox: + im_cropped = im.crop(bbox) + im.close() + return im_cropped + return im + # use xdisplay=None for default display on non-win32/macOS systems + if not Image.core.HAVE_XCB: + raise OSError("Pillow was built without XCB support") + size, data = Image.core.grabscreen_x11(xdisplay) + im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp(".jpg") + os.close(fh) + commands = [ + 'set theFile to (open for access POSIX file "' + + filepath + + '" with write permission)', + "try", + " write (the clipboard as JPEG picture) to theFile", + "end try", + "close access theFile", + ] + script = ["osascript"] + for command in commands: + script += ["-e", command] + subprocess.call(script) + + im = None + if os.stat(filepath).st_size != 0: + im = Image.open(filepath) + im.load() + os.unlink(filepath) + return im + elif sys.platform == "win32": + fmt, data = Image.core.grabclipboard_win32() + if fmt == "file": # CF_HDROP + import struct + + o = struct.unpack_from("I", data)[0] + if data[16] != 0: + files = data[o:].decode("utf-16le").split("\0") + else: + files = data[o:].decode("mbcs").split("\0") + return files[: files.index("")] + if isinstance(data, bytes): + import io + + data = io.BytesIO(data) + if fmt == "png": + from . import PngImagePlugin + + return PngImagePlugin.PngImageFile(data) + elif fmt == "DIB": + from . import BmpImagePlugin + + return BmpImagePlugin.DibImageFile(data) + return None + else: + raise NotImplementedError("ImageGrab.grabclipboard() is macOS and Windows only") diff --git a/sbsheriff/Lib/site-packages/PIL/ImageMath.py b/sbsheriff/Lib/site-packages/PIL/ImageMath.py new file mode 100644 index 0000000..09d9898 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageMath.py @@ -0,0 +1,259 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
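+#
+# A rough usage sketch (file names are placeholders; operands of unequal
+# size are cropped to their common overlap):
+#
+#     from PIL import Image, ImageMath
+#
+#     im1 = Image.open("a.png").convert("L")
+#     im2 = Image.open("b.png").convert("L")
+#     out = ImageMath.eval("convert(min(a, b), 'L')", a=im1, b=im2)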
+# + +import builtins + +from . import Image, _imagingmath + + +def _isconstant(v): + return isinstance(v, (int, float)) + + +class _Operand: + """Wraps an image operand, providing standard operators""" + + def __init__(self, im): + self.im = im + + def __fixup(self, im1): + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. + if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + raise ValueError(f"unsupported mode: {im1.im.mode}") + else: + # argument was a constant + if _isconstant(im1) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply(self, op, im1, im2=None, mode=None): + im1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + try: + op = getattr(_imagingmath, op + "_" + im1.mode) + except AttributeError as e: + raise TypeError(f"bad operand type for '{op}'") from e + _imagingmath.unop(op, out.im.id, im1.im.id) + else: + # binary operation + im2 = self.__fixup(im2) + if im1.mode != im2.mode: + # convert both arguments to floating point + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") + if im1.size != im2.size: + # crop both arguments to a common size + size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1])) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + im2.load() + try: + op = getattr(_imagingmath, op + "_" + im1.mode) + except AttributeError as e: + raise TypeError(f"bad operand type for '{op}'") from e + _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self): + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + def __abs__(self): + return self.apply("abs", self) + + def __pos__(self): + return self + + def __neg__(self): + return self.apply("neg", self) + + # binary operators + def __add__(self, other): + return self.apply("add", self, other) + + def __radd__(self, other): + return self.apply("add", other, self) + + def __sub__(self, other): + return self.apply("sub", self, other) + + def __rsub__(self, other): + return self.apply("sub", other, self) + + def __mul__(self, other): + return self.apply("mul", self, other) + + def __rmul__(self, other): + return self.apply("mul", other, self) + + def __truediv__(self, other): + return self.apply("div", self, other) + + def __rtruediv__(self, other): + return self.apply("div", other, self) + + def __mod__(self, other): + return self.apply("mod", self, other) + + def __rmod__(self, other): + return self.apply("mod", other, self) + + def __pow__(self, other): + return self.apply("pow", self, other) + + def __rpow__(self, other): + return self.apply("pow", other, self) + + # bitwise + def __invert__(self): + return self.apply("invert", self) + + def __and__(self, other): + return self.apply("and", self, other) + + def __rand__(self, other): + return self.apply("and", other, self) + + def __or__(self, other): + return self.apply("or", self, other) + + def __ror__(self, other): + return self.apply("or", other, self) + + def __xor__(self, other): + return self.apply("xor", self, other) + + def __rxor__(self, other): + return self.apply("xor", other, self) + + def 
__lshift__(self, other):
+        return self.apply("lshift", self, other)
+
+    def __rshift__(self, other):
+        return self.apply("rshift", self, other)
+
+    # logical
+    def __eq__(self, other):
+        return self.apply("eq", self, other)
+
+    def __ne__(self, other):
+        return self.apply("ne", self, other)
+
+    def __lt__(self, other):
+        return self.apply("lt", self, other)
+
+    def __le__(self, other):
+        return self.apply("le", self, other)
+
+    def __gt__(self, other):
+        return self.apply("gt", self, other)
+
+    def __ge__(self, other):
+        return self.apply("ge", self, other)
+
+
+# conversions
+def imagemath_int(self):
+    return _Operand(self.im.convert("I"))
+
+
+def imagemath_float(self):
+    return _Operand(self.im.convert("F"))
+
+
+# logical
+def imagemath_equal(self, other):
+    return self.apply("eq", self, other, mode="I")
+
+
+def imagemath_notequal(self, other):
+    return self.apply("ne", self, other, mode="I")
+
+
+def imagemath_min(self, other):
+    return self.apply("min", self, other)
+
+
+def imagemath_max(self, other):
+    return self.apply("max", self, other)
+
+
+def imagemath_convert(self, mode):
+    return _Operand(self.im.convert(mode))
+
+
+ops = {}
+for k, v in list(globals().items()):
+    if k[:10] == "imagemath_":
+        ops[k[10:]] = v
+
+
+def eval(expression, _dict={}, **kw):
+    """
+    Evaluates an image expression.
+
+    :param expression: A string containing a Python-style expression.
+    :param options: Values to add to the evaluation context. You
+        can either use a dictionary, or one or more keyword
+        arguments.
+    :return: The evaluated expression. This is usually an image object, but can
+        also be an integer, a floating point value, or a pixel tuple,
+        depending on the expression.
+    """
+
+    # build execution namespace
+    args = ops.copy()
+    args.update(_dict)
+    args.update(kw)
+    for k, v in list(args.items()):
+        if hasattr(v, "im"):
+            args[k] = _Operand(v)
+
+    compiled_code = compile(expression, "<string>", "eval")
+
+    def scan(code):
+        for const in code.co_consts:
+            if type(const) == type(compiled_code):
+                scan(const)
+
+        for name in code.co_names:
+            if name not in args and name != "abs":
+                raise ValueError(f"'{name}' not allowed")
+
+    scan(compiled_code)
+    out = builtins.eval(expression, {"__builtins": {"abs": abs}}, args)
+    try:
+        return out.im
+    except AttributeError:
+        return out
diff --git a/sbsheriff/Lib/site-packages/PIL/ImageMode.py b/sbsheriff/Lib/site-packages/PIL/ImageMode.py
new file mode 100644
index 0000000..0973536
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/ImageMode.py
@@ -0,0 +1,91 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard mode descriptors
+#
+# History:
+# 2006-03-20 fl   Added
+#
+# Copyright (c) 2006 by Secret Labs AB.
+# Copyright (c) 2006 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
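+#
+# A rough usage sketch: getmode() returns a descriptor exposing band and
+# storage information for a mode string:
+#
+#     from PIL import ImageMode
+#
+#     descriptor = ImageMode.getmode("RGB")
+#     descriptor.bands     # ('R', 'G', 'B')
+#     descriptor.basetype  # 'L'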
+#
+
+import sys
+
+# mode descriptor cache
+_modes = None
+
+
+class ModeDescriptor:
+    """Wrapper for mode strings."""
+
+    def __init__(self, mode, bands, basemode, basetype, typestr):
+        self.mode = mode
+        self.bands = bands
+        self.basemode = basemode
+        self.basetype = basetype
+        self.typestr = typestr
+
+    def __str__(self):
+        return self.mode
+
+
+def getmode(mode):
+    """Gets a mode descriptor for the given mode."""
+    global _modes
+    if not _modes:
+        # initialize mode cache
+        modes = {}
+        endian = "<" if sys.byteorder == "little" else ">"
+        for m, (basemode, basetype, bands, typestr) in {
+            # core modes
+            # Bits need to be extended to bytes
+            "1": ("L", "L", ("1",), "|b1"),
+            "L": ("L", "L", ("L",), "|u1"),
+            "I": ("L", "I", ("I",), endian + "i4"),
+            "F": ("L", "F", ("F",), endian + "f4"),
+            "P": ("P", "L", ("P",), "|u1"),
+            "RGB": ("RGB", "L", ("R", "G", "B"), "|u1"),
+            "RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"),
+            "RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"),
+            "CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"),
+            "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"),
+            # UNDONE - unsigned |u1i1i1
+            "LAB": ("RGB", "L", ("L", "A", "B"), "|u1"),
+            "HSV": ("RGB", "L", ("H", "S", "V"), "|u1"),
+            # extra experimental modes
+            "RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"),
+            "BGR;15": ("RGB", "L", ("B", "G", "R"), endian + "u2"),
+            "BGR;16": ("RGB", "L", ("B", "G", "R"), endian + "u2"),
+            "BGR;24": ("RGB", "L", ("B", "G", "R"), endian + "u3"),
+            "BGR;32": ("RGB", "L", ("B", "G", "R"), endian + "u4"),
+            "LA": ("L", "L", ("L", "A"), "|u1"),
+            "La": ("L", "L", ("L", "a"), "|u1"),
+            "PA": ("RGB", "L", ("P", "A"), "|u1"),
+        }.items():
+            modes[m] = ModeDescriptor(m, bands, basemode, basetype, typestr)
+        # mapping modes
+        for i16mode, typestr in {
+            # I;16 == I;16L, and I;32 == I;32L
+            "I;16": "<u2",
+            "I;16S": "<i2",
+            "I;16B": ">u2",
+            "I;16BS": ">i2",
+            "I;16N": endian + "u2",
+            "I;16NS": endian + "i2",
+            "I;32": "<u4",
+            "I;32B": ">u4",
+            "I;32L": "<u4",
+            "I;32S": "<i4",
+            "I;32BS": ">i4",
+            "I;32LS": "<i4",
+        }.items():
+            modes[i16mode] = ModeDescriptor(i16mode, ("I",), "L", "L", typestr)
+        # set global mode cache atomically
+        _modes = modes
+    return _modes[mode]
diff --git a/sbsheriff/Lib/site-packages/PIL/ImageMorph.py b/sbsheriff/Lib/site-packages/PIL/ImageMorph.py
new file mode 100644
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/ImageMorph.py
+# A binary morphology add-on for the Python Imaging Library
+#
+# History:
+#   2014-06-04 Initial version.
+#
+# Copyright (c) 2014 Dov Grobgeld <dov.grobgeld@gmail.com>
+
+import re
+
+from . import Image, _imagingmorph
+
+LUT_SIZE = 1 << 9
+
+# fmt: off
+ROTATION_MATRIX = [
+    6, 3, 0,
+    7, 4, 1,
+    8, 5, 2,
+]
+MIRROR_MATRIX = [
+    2, 1, 0,
+    5, 4, 3,
+    8, 7, 6,
+]
+# fmt: on
+
+
+class LutBuilder:
+    """A class for building a MorphLut from a descriptive language
+
+    The input is a list of pattern strings of the form::
+
+        4:(...
+           .1.
+           111)->1
+
+    (whitespace, including line breaks, is ignored). The leading option 4
+    describes a series of symmetry operations (in this case a
+    4-rotation); the pattern itself is described by:
+
+    - . or X - Ignore
+    - 1 - Pixel is on
+    - 0 - Pixel is off
+
+    The result of the operation is given after the "->" string.
+
+    The default is to return the current pixel value, which is
+    returned if no other match is found.
+
+    Operations:
+
+    - 4 - 4 way rotation
+    - N - Negate
+    - 1 - Dummy op for no other operation (an op must always be given)
+    - M - Mirroring
+
+    Example::
+
+        lb = LutBuilder(patterns=["4:(... .1. 111)->1"])
+        lut = lb.build_lut()
+
+    """
+
+    def __init__(self, patterns=None, op_name=None):
+        if patterns is not None:
+            self.patterns = patterns
+        else:
+            self.patterns = []
+        self.lut = None
+        if op_name is not None:
+            known_patterns = {
+                "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"],
+                "dilation4": ["4:(... .0. .1.)->1"],
+                "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"],
+                "erosion4": ["4:(... .1. .0.)->0"],
+                "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"],
+                "edge": [
+                    "1:(... ... ...)->0",
+                    "4:(.0. .1. ...)->1",
+                    "4:(01. .1. 
...)->1", + ], + } + if op_name not in known_patterns: + raise Exception("Unknown pattern " + op_name + "!") + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns): + self.patterns += patterns + + def build_default_lut(self): + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE)) + + def get_lut(self): + return self.lut + + def _string_permute(self, pattern, permutation): + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. + """ + assert len(permutation) == 9 + return "".join(pattern[p] for p in permutation) + + def _pattern_permute(self, basic_pattern, options, basic_result): + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if "4" in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res) + ) + # mirror + if "M" in options: + n = len(patterns) + for pattern, res in patterns[:n]: + patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res)) + + # negate + if "N" in options: + n = len(patterns) + for pattern, res in patterns[:n]: + # Swap 0 and 1 + pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1") + res = 1 - int(res) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self): + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", "")) + if not m: + raise Exception('Syntax error in pattern "' + p + '"') + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(" ", "").replace("\n", "") + + patterns += self._pattern_permute(pattern, options, result) + + # compile the patterns into regular expressions for speed + for i, pattern in enumerate(patterns): + p = pattern[0].replace(".", "X").replace("X", "[01]") + p = re.compile(p) + patterns[i] = (p, pattern[1]) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. 
The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1] + + for p, r in patterns: + if p.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp: + """A class for binary morphological operators""" + + def __init__(self, lut=None, op_name=None, patterns=None): + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image): + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + raise Exception("No operator loaded") + + if image.mode != "L": + raise ValueError("Image mode must be L") + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image): + """Get a list of coordinates matching the morphological operation on + an image. + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. See :ref:`coordinate-system`.""" + if self.lut is None: + raise Exception("No operator loaded") + + if image.mode != "L": + raise ValueError("Image mode must be L") + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image): + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. See :ref:`coordinate-system`.""" + + if image.mode != "L": + raise ValueError("Image mode must be L") + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename): + """Load an operator from an mrl file""" + with open(filename, "rb") as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != LUT_SIZE: + self.lut = None + raise Exception("Wrong size operator file!") + + def save_lut(self, filename): + """Save an operator to an mrl file""" + if self.lut is None: + raise Exception("No operator loaded") + with open(filename, "wb") as f: + f.write(self.lut) + + def set_lut(self, lut): + """Set the lut from an external source""" + self.lut = lut diff --git a/sbsheriff/Lib/site-packages/PIL/ImageOps.py b/sbsheriff/Lib/site-packages/PIL/ImageOps.py new file mode 100644 index 0000000..443c540 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageOps.py @@ -0,0 +1,616 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import functools +import operator +import re + +from . import Image, ImagePalette + +# +# helpers + + +def _border(border): + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color, mode): + if isinstance(color, str): + from . 
import ImageColor + + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image, lut): + if image.mode == "P": + # FIXME: apply to lookup table, not image data + raise NotImplementedError("mode P support coming soon") + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + raise OSError("not supported for this image mode") + + +# +# actions + + +def autocontrast(image, cutoff=0, ignore=None, mask=None, preserve_tone=False): + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image (or mask region), removes ``cutoff`` percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: The percent to cut off from the histogram on the low and + high ends. Either a tuple of (low, high), or a single + number for both. + :param ignore: The background pixel value (use None for no background). + :param mask: Histogram used in contrast operation is computed using pixels + within the mask. If no mask is given the entire image is used + for histogram computation. + :param preserve_tone: Preserve image tone in Photoshop-like style autocontrast. + + .. versionadded:: 8.2.0 + + :return: An image. + """ + if preserve_tone: + histogram = image.convert("L").histogram(mask) + else: + histogram = image.histogram(mask) + + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer : layer + 256] + if ignore is not None: + # get rid of outliers + try: + h[ignore] = 0 + except TypeError: + # assume sequence + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + if not isinstance(cutoff, tuple): + cutoff = (cutoff, cutoff) + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = n * cutoff[0] // 100 + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the high end + cut = n * cutoff[1] // 100 + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize(image, black, white, mid=None, blackpoint=0, whitepoint=255, midpoint=127): + """ + Colorize grayscale image. + This function calculates a color wedge which maps all black pixels in + the source image to the first color and all white pixels to the + second color. If ``mid`` is specified, it uses three-color mapping. + The ``black`` and ``white`` arguments should be RGB tuples or color names; + optionally you can use three-color mapping by also specifying ``mid``. + Mapping positions for any of the colors can be specified + (e.g. ``blackpoint``), where these parameters are the integer + value corresponding to where the corresponding color should be mapped. 
+ These parameters must have logical order, such that + ``blackpoint <= midpoint <= whitepoint`` (if ``mid`` is specified). + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :param mid: The color to use for midtone input pixels. + :param blackpoint: an int value [0, 255] for the black mapping. + :param whitepoint: an int value [0, 255] for the white mapping. + :param midpoint: an int value [0, 255] for the midtone mapping. + :return: An image. + """ + + # Initial asserts + assert image.mode == "L" + if mid is None: + assert 0 <= blackpoint <= whitepoint <= 255 + else: + assert 0 <= blackpoint <= midpoint <= whitepoint <= 255 + + # Define colors from arguments + black = _color(black, "RGB") + white = _color(white, "RGB") + if mid is not None: + mid = _color(mid, "RGB") + + # Empty lists for the mapping + red = [] + green = [] + blue = [] + + # Create the low-end values + for i in range(0, blackpoint): + red.append(black[0]) + green.append(black[1]) + blue.append(black[2]) + + # Create the mapping (2-color) + if mid is None: + + range_map = range(0, whitepoint - blackpoint) + + for i in range_map: + red.append(black[0] + i * (white[0] - black[0]) // len(range_map)) + green.append(black[1] + i * (white[1] - black[1]) // len(range_map)) + blue.append(black[2] + i * (white[2] - black[2]) // len(range_map)) + + # Create the mapping (3-color) + else: + + range_map1 = range(0, midpoint - blackpoint) + range_map2 = range(0, whitepoint - midpoint) + + for i in range_map1: + red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1)) + green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1)) + blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1)) + for i in range_map2: + red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2)) + green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2)) + blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2)) + + # Create the high-end values + for i in range(0, 256 - whitepoint): + red.append(white[0]) + green.append(white[1]) + blue.append(white[2]) + + # Return converted image + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def contain(image, size, method=Image.Resampling.BICUBIC): + """ + Returns a resized version of the image, set to the maximum width and height + within the requested size, while maintaining the original aspect ratio. + + :param image: The image to resize and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: Resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :return: An image. + """ + + im_ratio = image.width / image.height + dest_ratio = size[0] / size[1] + + if im_ratio != dest_ratio: + if im_ratio > dest_ratio: + new_height = round(image.height / image.width * size[0]) + if new_height != size[1]: + size = (size[0], new_height) + else: + new_width = round(image.width / image.height * size[1]) + if new_width != size[0]: + size = (new_width, size[1]) + return image.resize(size, resample=method) + + +def pad(image, size, method=Image.Resampling.BICUBIC, color=None, centering=(0.5, 0.5)): + """ + Returns a resized and padded version of the image, expanded to fill the + requested aspect ratio and size. + + :param image: The image to resize and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. 
+ :param method: Resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :param color: The background color of the padded image. + :param centering: Control the position of the original image within the + padded version. + + (0.5, 0.5) will keep the image centered + (0, 0) will keep the image aligned to the top left + (1, 1) will keep the image aligned to the bottom + right + :return: An image. + """ + + resized = contain(image, size, method) + if resized.size == size: + out = resized + else: + out = Image.new(image.mode, size, color) + if resized.palette: + out.putpalette(resized.getpalette()) + if resized.width != size[0]: + x = round((size[0] - resized.width) * max(0, min(centering[0], 1))) + out.paste(resized, (x, 0)) + else: + y = round((size[1] - resized.height) * max(0, min(centering[1], 1))) + out.paste(resized, (0, y)) + return out + + +def crop(image, border=0): + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. + """ + left, top, right, bottom = _border(border) + return image.crop((left, top, image.size[0] - right, image.size[1] - bottom)) + + +def scale(image, factor, resample=Image.Resampling.BICUBIC): + """ + Returns a rescaled image by a specific factor given in parameter. + A factor greater than 1 expands the image, between 0 and 1 contracts the + image. + + :param image: The image to rescale. + :param factor: The expansion factor, as a float. + :param resample: Resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + if factor == 1: + return image.copy() + elif factor <= 0: + raise ValueError("the factor must be greater than 0") + else: + size = (round(factor * image.width), round(factor * image.height)) + return image.resize(size, resample) + + +def deform(image, deformer, resample=Image.Resampling.BILINEAR): + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + ``getmesh`` method can be used. + :param resample: An optional resampling filter. Same values possible as + in the PIL.Image.transform function. + :return: An image. + """ + return image.transform( + image.size, Image.Transform.MESH, deformer.getmesh(image), resample + ) + + +def equalize(image, mask=None): + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. + """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b : b + 256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i + b] + return _lut(image, lut) + + +def expand(image, border=0, fill=0): + """ + Add border to the image + + :param image: The image to expand. 
+ :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + color = _color(fill, image.mode) + if image.palette: + palette = ImagePalette.ImagePalette(palette=image.getpalette()) + if isinstance(color, tuple): + color = palette.getcolor(color) + else: + palette = None + out = Image.new(image.mode, (width, height), color) + if palette: + out.putpalette(palette.palette) + out.paste(image, (left, top)) + return out + + +def fit(image, size, method=Image.Resampling.BICUBIC, bleed=0.0, centering=(0.5, 0.5)): + """ + Returns a resized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param image: The image to resize and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: Resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :param bleed: Remove a border around the outside of the image from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + Cannot be greater than or equal to 0.5. + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). + (0.0, 0.0) will crop from the top left corner (i.e. if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. 
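+
+    A minimal usage sketch (``"photo.jpg"`` is a placeholder path)::
+
+        from PIL import Image, ImageOps
+
+        with Image.open("photo.jpg") as im:
+            thumb = ImageOps.fit(im, (128, 128), bleed=0.01, centering=(0.5, 0.4))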
+ """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # https://www.cazabon.com + + # ensure centering is mutable + centering = list(centering) + + if not 0.0 <= centering[0] <= 1.0: + centering[0] = 0.5 + if not 0.0 <= centering[1] <= 1.0: + centering[1] = 0.5 + + if not 0.0 <= bleed < 0.5: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleed_pixels = (bleed * image.size[0], bleed * image.size[1]) + + live_size = ( + image.size[0] - bleed_pixels[0] * 2, + image.size[1] - bleed_pixels[1] * 2, + ) + + # calculate the aspect ratio of the live_size + live_size_ratio = live_size[0] / live_size[1] + + # calculate the aspect ratio of the output image + output_ratio = size[0] / size[1] + + # figure out if the sides or top/bottom will be cropped off + if live_size_ratio == output_ratio: + # live_size is already the needed ratio + crop_width = live_size[0] + crop_height = live_size[1] + elif live_size_ratio >= output_ratio: + # live_size is wider than what's needed, crop the sides + crop_width = output_ratio * live_size[1] + crop_height = live_size[1] + else: + # live_size is taller than what's needed, crop the top and bottom + crop_width = live_size[0] + crop_height = live_size[0] / output_ratio + + # make the crop + crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering[0] + crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering[1] + + crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height) + + # resize the image and return it + return image.resize(size, method, box=crop) + + +def flip(image): + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.Transpose.FLIP_TOP_BOTTOM) + + +def grayscale(image): + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image): + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = [] + for i in range(256): + lut.append(255 - i) + return image.point(lut) if image.mode == "1" else _lut(image, lut) + + +def mirror(image): + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.Transpose.FLIP_LEFT_RIGHT) + + +def posterize(image, bits): + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + lut = [] + mask = ~(2 ** (8 - bits) - 1) + for i in range(256): + lut.append(i & mask) + return _lut(image, lut) + + +def solarize(image, threshold=128): + """ + Invert all pixel values above a threshold. + + :param image: The image to solarize. + :param threshold: All pixels above this greyscale level are inverted. + :return: An image. + """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255 - i) + return _lut(image, lut) + + +def exif_transpose(image): + """ + If an image has an EXIF Orientation tag, other than 1, return a new image + that is transposed accordingly. The new image will have the orientation + data removed. + + Otherwise, return a copy of the image. + + :param image: The image to transpose. + :return: An image. 
+ """ + exif = image.getexif() + orientation = exif.get(0x0112) + method = { + 2: Image.Transpose.FLIP_LEFT_RIGHT, + 3: Image.Transpose.ROTATE_180, + 4: Image.Transpose.FLIP_TOP_BOTTOM, + 5: Image.Transpose.TRANSPOSE, + 6: Image.Transpose.ROTATE_270, + 7: Image.Transpose.TRANSVERSE, + 8: Image.Transpose.ROTATE_90, + }.get(orientation) + if method is not None: + transposed_image = image.transpose(method) + transposed_exif = transposed_image.getexif() + if 0x0112 in transposed_exif: + del transposed_exif[0x0112] + if "exif" in transposed_image.info: + transposed_image.info["exif"] = transposed_exif.tobytes() + elif "Raw profile type exif" in transposed_image.info: + transposed_image.info[ + "Raw profile type exif" + ] = transposed_exif.tobytes().hex() + elif "XML:com.adobe.xmp" in transposed_image.info: + for pattern in ( + r'tiff:Orientation="([0-9])"', + r"([0-9])", + ): + transposed_image.info["XML:com.adobe.xmp"] = re.sub( + pattern, "", transposed_image.info["XML:com.adobe.xmp"] + ) + return transposed_image + return image.copy() diff --git a/sbsheriff/Lib/site-packages/PIL/ImagePalette.py b/sbsheriff/Lib/site-packages/PIL/ImagePalette.py new file mode 100644 index 0000000..fe76c86 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImagePalette.py @@ -0,0 +1,268 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import array + +from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile +from ._deprecate import deprecate + + +class ImagePalette: + """ + Color palette for palette mapped images + + :param mode: The mode to use for the palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255. The list must consist of + all channels for one color followed by the next color (e.g. RGBRGBRGB). + Defaults to an empty palette. + """ + + def __init__(self, mode="RGB", palette=None, size=0): + self.mode = mode + self.rawmode = None # if set, palette contains raw data + self.palette = palette or bytearray() + self.dirty = None + if size != 0: + deprecate("The size parameter", 10, None) + if size != len(self.palette): + raise ValueError("wrong palette size") + + @property + def palette(self): + return self._palette + + @palette.setter + def palette(self, palette): + self._colors = None + self._palette = palette + + @property + def colors(self): + if self._colors is None: + mode_len = len(self.mode) + self._colors = {} + for i in range(0, len(self.palette), mode_len): + color = tuple(self.palette[i : i + mode_len]) + if color in self._colors: + continue + self._colors[color] = i // mode_len + return self._colors + + @colors.setter + def colors(self, colors): + self._colors = colors + + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.dirty = self.dirty + + return new + + def getdata(self): + """ + Get palette contents in format suitable for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode, self.tobytes() + + def tobytes(self): + """Convert palette to bytes. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + return arr.tobytes() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def getcolor(self, color, image=None): + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(color, tuple): + if self.mode == "RGB": + if len(color) == 4: + if color[3] != 255: + raise ValueError( + "cannot add non-opaque RGBA color to RGB palette" + ) + color = color[:3] + elif self.mode == "RGBA": + if len(color) == 3: + color += (255,) + try: + return self.colors[color] + except KeyError as e: + # allocate new color slot + if not isinstance(self.palette, bytearray): + self._palette = bytearray(self.palette) + index = len(self.palette) // 3 + special_colors = () + if image: + special_colors = ( + image.info.get("background"), + image.info.get("transparency"), + ) + while index in special_colors: + index += 1 + if index >= 256: + if image: + # Search for an unused index + for i, count in reversed(list(enumerate(image.histogram()))): + if count == 0 and i not in special_colors: + index = i + break + if index >= 256: + raise ValueError("cannot allocate more than 256 colors") from e + self.colors[color] = index + if index * 3 < len(self.palette): + self._palette = ( + self.palette[: index * 3] + + bytes(color) + + self.palette[index * 3 + 3 :] + ) + else: + self._palette += bytes(color) + self.dirty = 1 + return index + else: + raise ValueError(f"unknown color specifier: {repr(color)}") + + def save(self, fp): + """Save palette to text file. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write(f"# Mode: {self.mode}\n") + for i in range(256): + fp.write(f"{i}") + for j in range(i * len(self.mode), (i + 1) * len(self.mode)): + try: + fp.write(f" {self.palette[j]}") + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + + +def raw(rawmode, data): + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + + +def make_linear_lut(black, white): + lut = [] + if black == 0: + for i in range(256): + lut.append(white * i // 255) + else: + raise NotImplementedError # FIXME + return lut + + +def make_gamma_lut(exp): + lut = [] + for i in range(256): + lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) + return lut + + +def negative(mode="RGB"): + palette = list(range(256 * len(mode))) + palette.reverse() + return ImagePalette(mode, [i // len(mode) for i in palette]) + + +def random(mode="RGB"): + from random import randint + + palette = [] + for i in range(256 * len(mode)): + palette.append(randint(0, 255)) + return ImagePalette(mode, palette) + + +def sepia(white="#fff0c0"): + bands = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)] + return ImagePalette("RGB", [bands[i % 3][i // 3] for i in range(256 * 3)]) + + +def wedge(mode="RGB"): + palette = list(range(256 * len(mode))) + return ImagePalette(mode, [i // len(mode) for i in palette]) + + +def load(filename): + + # FIXME: supports GIMP gradients only + + with open(filename, "rb") as fp: + + for paletteHandler in [ + GimpPaletteFile.GimpPaletteFile, + GimpGradientFile.GimpGradientFile, + PaletteFile.PaletteFile, + ]: + try: + fp.seek(0) + lut = paletteHandler(fp).getpalette() + if lut: + break + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + else: + raise OSError("cannot load palette") + + return lut # data, rawmode diff --git a/sbsheriff/Lib/site-packages/PIL/ImagePath.py b/sbsheriff/Lib/site-packages/PIL/ImagePath.py new file mode 100644 index 0000000..3d3538c --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImagePath.py @@ -0,0 +1,19 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from . import Image + +Path = Image.core.path diff --git a/sbsheriff/Lib/site-packages/PIL/ImageQt.py b/sbsheriff/Lib/site-packages/PIL/ImageQt.py new file mode 100644 index 0000000..a34678c --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageQt.py @@ -0,0 +1,228 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import sys +from io import BytesIO + +from . 
import Image +from ._deprecate import deprecate +from ._util import is_path + +qt_versions = [ + ["6", "PyQt6"], + ["side6", "PySide6"], + ["5", "PyQt5"], + ["side2", "PySide2"], +] + +# If a version has already been imported, attempt it first +qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True) +for qt_version, qt_module in qt_versions: + try: + if qt_module == "PyQt6": + from PyQt6.QtCore import QBuffer, QIODevice + from PyQt6.QtGui import QImage, QPixmap, qRgba + elif qt_module == "PySide6": + from PySide6.QtCore import QBuffer, QIODevice + from PySide6.QtGui import QImage, QPixmap, qRgba + elif qt_module == "PyQt5": + from PyQt5.QtCore import QBuffer, QIODevice + from PyQt5.QtGui import QImage, QPixmap, qRgba + + deprecate("Support for PyQt5", 10, "PyQt6 or PySide6") + elif qt_module == "PySide2": + from PySide2.QtCore import QBuffer, QIODevice + from PySide2.QtGui import QImage, QPixmap, qRgba + + deprecate("Support for PySide2", 10, "PyQt6 or PySide6") + except (ImportError, RuntimeError): + continue + qt_is_installed = True + break +else: + qt_is_installed = False + qt_version = None + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. + return qRgba(r, g, b, a) & 0xFFFFFFFF + + +def fromqimage(im): + """ + :param im: QImage or PIL ImageQt object + """ + buffer = QBuffer() + if qt_version == "6": + try: + qt_openmode = QIODevice.OpenModeFlag + except AttributeError: + qt_openmode = QIODevice.OpenMode + else: + qt_openmode = QIODevice + buffer.open(qt_openmode.ReadWrite) + # preserve alpha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, "png") + else: + im.save(buffer, "ppm") + + b = BytesIO() + b.write(buffer.data()) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? + # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return Image.open(bytes_io) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = {"1": 1, "L": 8, "P": 8, "I;16": 16}[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append( + bytes[i * bytes_per_line : (i + 1) * bytes_per_line] + + b"\x00" * extra_padding + ) + + return b"".join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + exclusive_fp = False + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? 
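+        # `im` exposes toUtf8(), so it is a Qt string object; decode it to a
+        # Python str so the filename handling below can apply to it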
+ im = str(im.toUtf8(), "utf-8") + if is_path(im): + im = Image.open(im) + exclusive_fp = True + + qt_format = QImage.Format if qt_version == "6" else QImage + if im.mode == "1": + format = qt_format.Format_Mono + elif im.mode == "L": + format = qt_format.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = qt_format.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i : i + 3])) + elif im.mode == "RGB": + # Populate the 4th channel with 255 + im = im.convert("RGBA") + + data = im.tobytes("raw", "BGRA") + format = qt_format.Format_RGB32 + elif im.mode == "RGBA": + data = im.tobytes("raw", "BGRA") + format = qt_format.Format_ARGB32 + elif im.mode == "I;16" and hasattr(qt_format, "Format_Grayscale16"): # Qt 5.13+ + im = im.point(lambda i: i * 256) + + format = qt_format.Format_Grayscale16 + else: + if exclusive_fp: + im.close() + raise ValueError(f"unsupported image mode {repr(im.mode)}") + + size = im.size + __data = data or align8to32(im.tobytes(), size[0], im.mode) + if exclusive_fp: + im.close() + return {"data": __data, "size": size, "format": format, "colortable": colortable} + + +if qt_is_installed: + + class ImageQt(QImage): + def __init__(self, im): + """ + An PIL image wrapper for Qt. This is a subclass of PyQt's QImage + class. + + :param im: A PIL Image object, or a file name (given either as + Python string or a PyQt string object). + """ + im_data = _toqclass_helper(im) + # must keep a reference, or Qt will crash! + # All QImage constructors that take data operate on an existing + # buffer, so this buffer has to hang on for the life of the image. + # Fixes https://github.com/python-pillow/Pillow/issues/1370 + self.__data = im_data["data"] + super().__init__( + self.__data, + im_data["size"][0], + im_data["size"][1], + im_data["format"], + ) + if im_data["colortable"]: + self.setColorTable(im_data["colortable"]) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. For now using a dumb approach. + # im_data = _toqclass_helper(im) + # result = QPixmap(im_data["size"][0], im_data["size"][1]) + # result.loadFromData(im_data["data"]) + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageSequence.py b/sbsheriff/Lib/site-packages/PIL/ImageSequence.py new file mode 100644 index 0000000..9df910a --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageSequence.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## + + +class Iterator: + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. 
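+
+    A minimal usage sketch (``"animation.gif"`` is a placeholder path)::
+
+        from PIL import Image, ImageSequence
+
+        with Image.open("animation.gif") as im:
+            for frame in ImageSequence.Iterator(im):
+                print(frame.tell(), frame.size)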
+ """ + + def __init__(self, im): + if not hasattr(im, "seek"): + raise AttributeError("im must have seek method") + self.im = im + self.position = getattr(self.im, "_min_frame", 0) + + def __getitem__(self, ix): + try: + self.im.seek(ix) + return self.im + except EOFError as e: + raise IndexError from e # end of sequence + + def __iter__(self): + return self + + def __next__(self): + try: + self.im.seek(self.position) + self.position += 1 + return self.im + except EOFError as e: + raise StopIteration from e + + +def all_frames(im, func=None): + """ + Applies a given function to all frames in an image or a list of images. + The frames are returned as a list of separate images. + + :param im: An image, or a list of images. + :param func: The function to apply to all of the image frames. + :returns: A list of images. + """ + if not isinstance(im, list): + im = [im] + + ims = [] + for imSequence in im: + current = imSequence.tell() + + ims += [im_frame.copy() for im_frame in Iterator(imSequence)] + + imSequence.seek(current) + return [func(im) for im in ims] if func else ims diff --git a/sbsheriff/Lib/site-packages/PIL/ImageShow.py b/sbsheriff/Lib/site-packages/PIL/ImageShow.py new file mode 100644 index 0000000..76f42a3 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageShow.py @@ -0,0 +1,392 @@ +# +# The Python Imaging Library. +# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# +import os +import shutil +import subprocess +import sys +from shlex import quote + +from PIL import Image + +from ._deprecate import deprecate + +_viewers = [] + + +def register(viewer, order=1): + """ + The :py:func:`register` function is used to register additional viewers:: + + from PIL import ImageShow + ImageShow.register(MyViewer()) # MyViewer will be used as a last resort + ImageShow.register(MySecondViewer(), 0) # MySecondViewer will be prioritised + ImageShow.register(ImageShow.XVViewer(), 0) # XVViewer will be prioritised + + :param viewer: The viewer to be registered. + :param order: + Zero or a negative integer to prepend this viewer to the list, + a positive integer to append it. + """ + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + else: + _viewers.insert(0, viewer) + + +def show(image, title=None, **options): + r""" + Display a given image. + + :param image: An image object. + :param title: Optional title. Not all viewers can display the title. + :param \**options: Additional viewer options. + :returns: ``True`` if a suitable viewer was found, ``False`` otherwise. + """ + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return True + return False + + +class Viewer: + """Base class for viewers.""" + + # main api + + def show(self, image, **options): + """ + The main function for displaying an image. + Converts the given image to the target format and displays it. 
+ """ + + if not ( + image.mode in ("1", "RGBA") + or (self.format == "PNG" and image.mode in ("I;16", "LA")) + ): + base = Image.getmodebase(image.mode) + if image.mode != base: + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format = None + """The format to convert the image into.""" + options = {} + """Additional options used to convert the image.""" + + def get_format(self, image): + """Return format name, or ``None`` to save as PGM/PPM.""" + return self.format + + def get_command(self, file, **options): + """ + Returns the command used to display the file. + Not implemented in the base class. + """ + raise NotImplementedError + + def save_image(self, image): + """Save to temporary file and return filename.""" + return image._dump(format=self.get_format(image), **self.options) + + def show_image(self, image, **options): + """Display the given image.""" + return self.show_file(self.save_image(image), **options) + + def show_file(self, path=None, **options): + """ + Display given file. + + Before Pillow 9.1.0, the first argument was ``file``. This is now deprecated, + and will be removed in Pillow 10.0.0 (2023-07-01). ``path`` should be used + instead. + """ + if path is None: + if "file" in options: + deprecate("The 'file' argument", 10, "'path'") + path = options.pop("file") + else: + raise TypeError("Missing required argument: 'path'") + os.system(self.get_command(path, **options)) + return 1 + + +# -------------------------------------------------------------------- + + +class WindowsViewer(Viewer): + """The default viewer on Windows is the default system application for PNG files.""" + + format = "PNG" + options = {"compress_level": 1, "save_all": True} + + def get_command(self, file, **options): + return ( + f'start "Pillow" /WAIT "{file}" ' + "&& ping -n 4 127.0.0.1 >NUL " + f'&& del /f "{file}"' + ) + + +if sys.platform == "win32": + register(WindowsViewer) + + +class MacViewer(Viewer): + """The default viewer on macOS using ``Preview.app``.""" + + format = "PNG" + options = {"compress_level": 1, "save_all": True} + + def get_command(self, file, **options): + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a Preview.app" + command = f"({command} {quote(file)}; sleep 20; rm -f {quote(file)})&" + return command + + def show_file(self, path=None, **options): + """ + Display given file. + + Before Pillow 9.1.0, the first argument was ``file``. This is now deprecated, + and will be removed in Pillow 10.0.0 (2023-07-01). ``path`` should be used + instead. + """ + if path is None: + if "file" in options: + deprecate("The 'file' argument", 10, "'path'") + path = options.pop("file") + else: + raise TypeError("Missing required argument: 'path'") + subprocess.call(["open", "-a", "Preview.app", path]) + executable = sys.executable or shutil.which("python3") + if executable: + subprocess.Popen( + [ + executable, + "-c", + "import os, sys, time; time.sleep(20); os.remove(sys.argv[1])", + path, + ] + ) + return 1 + + +if sys.platform == "darwin": + register(MacViewer) + + +class UnixViewer(Viewer): + format = "PNG" + options = {"compress_level": 1, "save_all": True} + + def get_command(self, file, **options): + command = self.get_command_ex(file, **options)[0] + return f"({command} {quote(file)}" + + +class XDGViewer(UnixViewer): + """ + The freedesktop.org ``xdg-open`` command. 
+ """ + + def get_command_ex(self, file, **options): + command = executable = "xdg-open" + return command, executable + + def show_file(self, path=None, **options): + """ + Display given file. + + Before Pillow 9.1.0, the first argument was ``file``. This is now deprecated, + and will be removed in Pillow 10.0.0 (2023-07-01). ``path`` should be used + instead. + """ + if path is None: + if "file" in options: + deprecate("The 'file' argument", 10, "'path'") + path = options.pop("file") + else: + raise TypeError("Missing required argument: 'path'") + subprocess.Popen(["xdg-open", path]) + return 1 + + +class DisplayViewer(UnixViewer): + """ + The ImageMagick ``display`` command. + This viewer supports the ``title`` parameter. + """ + + def get_command_ex(self, file, title=None, **options): + command = executable = "display" + if title: + command += f" -title {quote(title)}" + return command, executable + + def show_file(self, path=None, **options): + """ + Display given file. + + Before Pillow 9.1.0, the first argument was ``file``. This is now deprecated, + and ``path`` should be used instead. + """ + if path is None: + if "file" in options: + deprecate("The 'file' argument", 10, "'path'") + path = options.pop("file") + else: + raise TypeError("Missing required argument: 'path'") + args = ["display"] + title = options.get("title") + if title: + args += ["-title", title] + args.append(path) + + subprocess.Popen(args) + return 1 + + +class GmDisplayViewer(UnixViewer): + """The GraphicsMagick ``gm display`` command.""" + + def get_command_ex(self, file, **options): + executable = "gm" + command = "gm display" + return command, executable + + def show_file(self, path=None, **options): + """ + Display given file. + + Before Pillow 9.1.0, the first argument was ``file``. This is now deprecated, + and ``path`` should be used instead. + """ + if path is None: + if "file" in options: + deprecate("The 'file' argument", 10, "'path'") + path = options.pop("file") + else: + raise TypeError("Missing required argument: 'path'") + subprocess.Popen(["gm", "display", path]) + return 1 + + +class EogViewer(UnixViewer): + """The GNOME Image Viewer ``eog`` command.""" + + def get_command_ex(self, file, **options): + executable = "eog" + command = "eog -n" + return command, executable + + def show_file(self, path=None, **options): + """ + Display given file. + + Before Pillow 9.1.0, the first argument was ``file``. This is now deprecated, + and ``path`` should be used instead. + """ + if path is None: + if "file" in options: + deprecate("The 'file' argument", 10, "'path'") + path = options.pop("file") + else: + raise TypeError("Missing required argument: 'path'") + subprocess.Popen(["eog", "-n", path]) + return 1 + + +class XVViewer(UnixViewer): + """ + The X Viewer ``xv`` command. + This viewer supports the ``title`` parameter. + """ + + def get_command_ex(self, file, title=None, **options): + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += f" -name {quote(title)}" + return command, executable + + def show_file(self, path=None, **options): + """ + Display given file. + + Before Pillow 9.1.0, the first argument was ``file``. This is now deprecated, + and ``path`` should be used instead. 
+ """ + if path is None: + if "file" in options: + deprecate("The 'file' argument", 10, "'path'") + path = options.pop("file") + else: + raise TypeError("Missing required argument: 'path'") + args = ["xv"] + title = options.get("title") + if title: + args += ["-name", title] + args.append(path) + + subprocess.Popen(args) + return 1 + + +if sys.platform not in ("win32", "darwin"): # unixoids + if shutil.which("xdg-open"): + register(XDGViewer) + if shutil.which("display"): + register(DisplayViewer) + if shutil.which("gm"): + register(GmDisplayViewer) + if shutil.which("eog"): + register(EogViewer) + if shutil.which("xv"): + register(XVViewer) + + +class IPythonViewer(Viewer): + """The viewer for IPython frontends.""" + + def show_image(self, image, **options): + ipython_display(image) + return 1 + + +try: + from IPython.display import display as ipython_display +except ImportError: + pass +else: + register(IPythonViewer) + + +if __name__ == "__main__": + + if len(sys.argv) < 2: + print("Syntax: python3 ImageShow.py imagefile [title]") + sys.exit() + + with Image.open(sys.argv[1]) as im: + print(show(im, *sys.argv[2:])) diff --git a/sbsheriff/Lib/site-packages/PIL/ImageStat.py b/sbsheriff/Lib/site-packages/PIL/ImageStat.py new file mode 100644 index 0000000..1baef7d --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageStat.py @@ -0,0 +1,147 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996-97. +# +# See the README file for information on usage and redistribution. 
+# + +import functools +import math +import operator + + +class Stat: + def __init__(self, image_or_list, mask=None): + try: + if mask: + self.h = image_or_list.histogram(mask) + else: + self.h = image_or_list.histogram() + except AttributeError: + self.h = image_or_list # assume it to be a histogram list + if not isinstance(self.h, list): + raise TypeError("first argument must be image or list") + self.bands = list(range(len(self.h) // 256)) + + def __getattr__(self, id): + """Calculate missing attribute""" + if id[:4] == "_get": + raise AttributeError(id) + # calculate missing attribute + v = getattr(self, "_get" + id)() + setattr(self, id, v) + return v + + def _getextrema(self): + """Get min/max values for each band in the image""" + + def minmax(histogram): + n = 255 + x = 0 + for i in range(256): + if histogram[i]: + n = min(n, i) + x = max(x, i) + return n, x # returns (255, 0) if there's no data in the histogram + + v = [] + for i in range(0, len(self.h), 256): + v.append(minmax(self.h[i:])) + return v + + def _getcount(self): + """Get total number of pixels in each layer""" + + v = [] + for i in range(0, len(self.h), 256): + v.append(functools.reduce(operator.add, self.h[i : i + 256])) + return v + + def _getsum(self): + """Get sum of all pixels in each layer""" + + v = [] + for i in range(0, len(self.h), 256): + layer_sum = 0.0 + for j in range(256): + layer_sum += j * self.h[i + j] + v.append(layer_sum) + return v + + def _getsum2(self): + """Get squared sum of all pixels in each layer""" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j**2) * float(self.h[i + j]) + v.append(sum2) + return v + + def _getmean(self): + """Get average pixel level for each layer""" + + v = [] + for i in self.bands: + v.append(self.sum[i] / self.count[i]) + return v + + def _getmedian(self): + """Get median pixel level for each layer""" + + v = [] + for i in self.bands: + s = 0 + half = self.count[i] // 2 + b = i * 256 + for j in range(256): + s = s + self.h[b + j] + if s > half: + break + v.append(j) + return v + + def _getrms(self): + """Get RMS for each layer""" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.sum2[i] / self.count[i])) + return v + + def _getvar(self): + """Get variance for each layer""" + + v = [] + for i in self.bands: + n = self.count[i] + v.append((self.sum2[i] - (self.sum[i] ** 2.0) / n) / n) + return v + + def _getstddev(self): + """Get standard deviation for each layer""" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.var[i])) + return v + + +Global = Stat # compatibility diff --git a/sbsheriff/Lib/site-packages/PIL/ImageTk.py b/sbsheriff/Lib/site-packages/PIL/ImageTk.py new file mode 100644 index 0000000..949cf1f --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageTk.py @@ -0,0 +1,291 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import tkinter +from io import BytesIO + +from . import Image +from ._deprecate import deprecate + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check(): + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data=f"PIL:{im.im.id}") + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +def _get_image_from_kw(kw): + source = None + if "file" in kw: + source = kw.pop("file") + elif "data" in kw: + source = BytesIO(kw.pop("data")) + if source: + return Image.open(source) + + +def _pyimagingtkcall(command, photo, id): + tk = photo.tk + try: + tk.call(command, photo, id) + except tkinter.TclError: + # activate Tkinter hook + # may raise an error if it cannot attach to Tkinter + from . import _imagingtk + + _imagingtk.tkinit(tk.interpaddr()) + tk.call(command, photo, id) + + +# -------------------------------------------------------------------- +# PhotoImage + + +class PhotoImage: + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the ``file`` or ``data`` options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). 
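+
+    Example (a sketch; assumes a Tk root window ``root`` exists and that
+    ``icon.png`` is a placeholder)::
+
+        photo = ImageTk.PhotoImage(file="icon.png")
+        label = tkinter.Label(root, image=photo)
+        label.image = photo  # keep a reference so it is not garbage-collected
+        label.pack()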
+ """ + + def __init__(self, image=None, size=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.apply_transparency() + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except Exception: + pass # ignore internal errors + + def __str__(self): + """ + Get the Tkinter photo image identifier. This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. + + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im, box=None): + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + :param box: Deprecated. This parameter will be removed in Pillow 10 + (2023-07-01). + """ + + if box is not None: + deprecate("The box parameter", 10, None) + + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + _pyimagingtkcall("PyImagingPhoto", self.__photo, block.id) + + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage: + """ + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is ``foreground``, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. + """ + + def __init__(self, image=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = f"PIL:{image.im.id}" + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except Exception: + pass # ignore internal errors + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. 
+ """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self): + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). + """ + return str(self.__photo) + + +def getimage(photo): + """Copies the contents of a PhotoImage to a PIL image memory.""" + im = Image.new("RGBA", (photo.width(), photo.height())) + block = im.im + + _pyimagingtkcall("PyImagingPhotoGet", photo, block.id) + + return im + + +def _show(image, title): + """Helper for the Image.show method.""" + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + super().__init__(master, image=self.image, bg="black", bd=0) + + if not tkinter._default_root: + raise OSError("tkinter not initialized") + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() diff --git a/sbsheriff/Lib/site-packages/PIL/ImageTransform.py b/sbsheriff/Lib/site-packages/PIL/ImageTransform.py new file mode 100644 index 0000000..7881f0d --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageTransform.py @@ -0,0 +1,102 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +class Transform(Image.ImageTransformHandler): + def __init__(self, data): + self.data = data + + def getdata(self): + return self.method, self.data + + def transform(self, size, image, **options): + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +class AffineTransform(Transform): + """ + Define an affine image transform. + + This function takes a 6-tuple (a, b, c, d, e, f) which contain the first + two rows from an affine transform matrix. For each pixel (x, y) in the + output image, the new value is taken from a position (a x + b y + c, + d x + e y + f) in the input image, rounded to nearest pixel. + + This function can be used to scale, translate, rotate, and shear the + original image. + + See :py:meth:`~PIL.Image.Image.transform` + + :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows + from an affine transform matrix. + """ + + method = Image.Transform.AFFINE + + +class ExtentTransform(Transform): + """ + Define a transform to extract a subregion from an image. + + Maps a rectangle (defined by two corners) from the image to a rectangle of + the given size. The resulting image will contain data sampled from between + the corners, such that (x0, y0) in the input image will end up at (0,0) in + the output image, and (x1, y1) at size. + + This method can be used to crop, stretch, shrink, or mirror an arbitrary + rectangle in the current image. It is slightly slower than crop, but about + as fast as a corresponding resize operation. + + See :py:meth:`~PIL.Image.Image.transform` + + :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the + input image's coordinate system. See :ref:`coordinate-system`. 
+ """ + + method = Image.Transform.EXTENT + + +class QuadTransform(Transform): + """ + Define a quad image transform. + + Maps a quadrilateral (a region defined by four corners) from the image to a + rectangle of the given size. + + See :py:meth:`~PIL.Image.Image.transform` + + :param xy: An 8-tuple (x0, y0, x1, y1, x2, y2, x3, y3) which contain the + upper left, lower left, lower right, and upper right corner of the + source quadrilateral. + """ + + method = Image.Transform.QUAD + + +class MeshTransform(Transform): + """ + Define a mesh image transform. A mesh transform consists of one or more + individual quad transforms. + + See :py:meth:`~PIL.Image.Image.transform` + + :param data: A list of (bbox, quad) tuples. + """ + + method = Image.Transform.MESH diff --git a/sbsheriff/Lib/site-packages/PIL/ImageWin.py b/sbsheriff/Lib/site-packages/PIL/ImageWin.py new file mode 100644 index 0000000..ca9b14c --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImageWin.py @@ -0,0 +1,230 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +class HDC: + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + + def __init__(self, dc): + self.dc = dc + + def __int__(self): + return self.dc + + +class HWND: + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + + def __init__(self, wnd): + self.wnd = wnd + + def __int__(self): + return self.wnd + + +class Dib: + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 greylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 greylevels. + + To make sure that palettes work properly under Windows, you must call the + ``palette`` method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. + """ + + def __init__(self, image, size=None): + if hasattr(image, "mode") and hasattr(image, "size"): + mode = image.mode + size = image.size + else: + mode = image + image = None + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use + ``CDC.GetHandleAttrib()`` to get a suitable handle. 
+ """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. If the source and + the destination have different sizes, the image is resized as + necessary. + """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste(self, im, box=None): + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. See :ref:`coordinate-system`. If + None is given instead of a tuple, all of the image is + assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer): + """ + Load display memory contents from byte data. + + :param buffer: A buffer containing display data (usually + data returned from :py:func:`~PIL.ImageWin.Dib.tobytes`) + """ + return self.image.frombytes(buffer) + + def tobytes(self): + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. 
+ """ + return self.image.tobytes() + + +class Window: + """Create a Window with the given title size.""" + + def __init__(self, title="PIL", width=None, height=None): + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, "ui_handle_" + action)(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self): + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self): + Image.core.eventloop() + + +class ImageWindow(Window): + """Create an image window which displays the given image.""" + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + super().__init__(title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/sbsheriff/Lib/site-packages/PIL/ImtImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/ImtImagePlugin.py new file mode 100644 index 0000000..dc70780 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/ImtImagePlugin.py @@ -0,0 +1,105 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from . import Image, ImageFile + +# +# -------------------------------------------------------------------- + +field = re.compile(rb"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. + + +class ImtImageFile(ImageFile.ImageFile): + + format = "IMT" + format_description = "IM Tools" + + def _open(self): + + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. + + buffer = self.fp.read(100) + if b"\n" not in buffer: + raise SyntaxError("not an IM file") + + xsize = ysize = 0 + + while True: + + if buffer: + s = buffer[:1] + buffer = buffer[1:] + else: + s = self.fp.read(1) + if not s: + break + + if s == b"\x0C": + + # image data begins + self.tile = [ + ( + "raw", + (0, 0) + self.size, + self.fp.tell() - len(buffer), + (self.mode, 0, 1), + ) + ] + + break + + else: + + # read key/value pair + if b"\n" not in buffer: + buffer += self.fp.read(100) + lines = buffer.split(b"\n") + s += lines.pop(0) + buffer = b"\n".join(lines) + if len(s) == 1 or len(s) > 100: + break + if s[0] == ord(b"*"): + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == b"width": + xsize = int(v) + self._size = xsize, ysize + elif k == b"height": + ysize = int(v) + self._size = xsize, ysize + elif k == b"pixel" and v == b"n8": + self.mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/sbsheriff/Lib/site-packages/PIL/IptcImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/IptcImagePlugin.py new file mode 100644 index 0000000..0bbe506 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/IptcImagePlugin.py @@ -0,0 +1,230 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# +import os +import tempfile + +from . import Image, ImageFile +from ._binary import i8 +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 + +COMPRESSION = {1: "raw", 5: "jpeg"} + +PAD = o8(0) * 4 + + +# +# Helpers + + +def i(c): + return i32((PAD + c)[-4:]) + + +def dump(c): + for i in c: + print("%02x" % i8(i), end=" ") + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. + + +class IptcImageFile(ImageFile.ImageFile): + + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key): + return i(self.info[key]) + + def field(self): + # + # get a IPTC field header + s = self.fp.read(5) + if not len(s): + return None, 0 + + tag = s[1], s[2] + + # syntax + if s[0] != 0x1C or tag[0] < 1 or tag[0] > 9: + raise SyntaxError("invalid IPTC/NAA file") + + # field size + size = s[3] + if size > 132: + raise OSError("illegal field length in IPTC/NAA file") + elif size == 128: + size = 0 + elif size > 128: + size = i(self.fp.read(size - 128)) + else: + size = i16(s, 3) + + return tag, size + + def _open(self): + + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in self.info: + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # mode + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0]) - 1 + else: + id = 0 + if layers == 1 and not component: + self.mode = "L" + elif layers == 3 and component: + self.mode = "RGB"[id] + elif layers == 4 and component: + self.mode = "CMYK"[id] + + # size + self._size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] + except KeyError as e: + raise OSError("Unknown IPTC image compression") from e + + # tile + if tag == (8, 10): + self.tile = [ + ("iptc", (compression, offset), (0, 0, self.size[0], self.size[1])) + ] + + def load(self): + + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + type, tile, box = self.tile[0] + + encoding, offset = tile + + self.fp.seek(offset) + + # Copy image data to temporary file + o_fd, outfile = tempfile.mkstemp(text=False) + o = os.fdopen(o_fd) + if encoding == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write("P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + o.close() + + try: + with Image.open(outfile) as _im: + _im.load() + self.im = _im.im + finally: + try: + os.unlink(outfile) + except OSError: + pass + + +Image.register_open(IptcImageFile.format, IptcImageFile) + +Image.register_extension(IptcImageFile.format, ".iim") + + +def getiptcinfo(im): + """ + Get IPTC information from TIFF, JPEG, or IPTC file. 
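+
+    Example (sketch; ``photo.jpg`` is a placeholder)::
+
+        from PIL import Image, IptcImagePlugin
+
+        with Image.open("photo.jpg") as im:
+            iptc = IptcImagePlugin.getiptcinfo(im)
+        if iptc:
+            print(iptc.get((2, 120)))  # IIM caption/abstract, if present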
+ + :param im: An image containing IPTC data. + :returns: A dictionary containing IPTC information, or None if + no IPTC information block was found. + """ + import io + + from . import JpegImagePlugin, TiffImagePlugin + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + photoshop = im.info.get("photoshop") + if photoshop: + data = photoshop.get(0x0404) + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) + try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage: + pass + + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = io.BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/sbsheriff/Lib/site-packages/PIL/Jpeg2KImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000..c67d8d6 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,362 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# 2021-06-30 rogermb Extract dpi information from the 'resc' header box +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. +# +import io +import os +import struct + +from . import Image, ImageFile + + +class BoxReader: + """ + A small helper class to read fields stored in JPEG2000 header boxes + and to easily step into and read sub-boxes. + """ + + def __init__(self, fp, length=-1): + self.fp = fp + self.has_length = length >= 0 + self.length = length + self.remaining_in_box = -1 + + def _can_read(self, num_bytes): + if self.has_length and self.fp.tell() + num_bytes > self.length: + # Outside box: ensure we don't read past the known file length + return False + if self.remaining_in_box >= 0: + # Inside box contents: ensure read does not go past box boundaries + return num_bytes <= self.remaining_in_box + else: + return True # No length known, just read + + def _read_bytes(self, num_bytes): + if not self._can_read(num_bytes): + raise SyntaxError("Not enough data in header") + + data = self.fp.read(num_bytes) + if len(data) < num_bytes: + raise OSError( + f"Expected to read {num_bytes} bytes but only got {len(data)}." 
+ ) + + if self.remaining_in_box > 0: + self.remaining_in_box -= num_bytes + return data + + def read_fields(self, field_format): + size = struct.calcsize(field_format) + data = self._read_bytes(size) + return struct.unpack(field_format, data) + + def read_boxes(self): + size = self.remaining_in_box + data = self._read_bytes(size) + return BoxReader(io.BytesIO(data), size) + + def has_next_box(self): + if self.has_length: + return self.fp.tell() + self.remaining_in_box < self.length + else: + return True + + def next_box_type(self): + # Skip the rest of the box if it has not been read + if self.remaining_in_box > 0: + self.fp.seek(self.remaining_in_box, os.SEEK_CUR) + self.remaining_in_box = -1 + + # Read the length and type of the next box + lbox, tbox = self.read_fields(">I4s") + if lbox == 1: + lbox = self.read_fields(">Q")[0] + hlen = 16 + else: + hlen = 8 + + if lbox < hlen or not self._can_read(lbox - hlen): + raise SyntaxError("Invalid header length") + + self.remaining_in_box = lbox - hlen + return tbox + + +def _parse_codestream(fp): + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = struct.unpack(">H", hdr)[0] + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from( + ">HHIIIIIIIIH", siz + ) + ssiz = [None] * csiz + xrsiz = [None] * csiz + yrsiz = [None] * csiz + for i in range(csiz): + ssiz[i], xrsiz[i], yrsiz[i] = struct.unpack_from(">BBB", siz, 36 + 3 * i) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + if (yrsiz[0] & 0x7F) > 8: + mode = "I;16" + else: + mode = "L" + elif csiz == 2: + mode = "LA" + elif csiz == 3: + mode = "RGB" + elif csiz == 4: + mode = "RGBA" + else: + mode = None + + return size, mode + + +def _res_to_dpi(num, denom, exp): + """Convert JPEG2000's (numerator, denominator, exponent-base-10) resolution, + calculated as (num / denom) * 10^exp and stored in dots per meter, + to floating-point dots per inch.""" + if denom != 0: + return (254 * num * (10**exp)) / (10000 * denom) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count, + color space information, and optionally DPI information, + returning a (size, mode, mimetype, dpi) tuple.""" + + # Find the JP2 header box + reader = BoxReader(fp) + header = None + mimetype = None + while reader.has_next_box(): + tbox = reader.next_box_type() + + if tbox == b"jp2h": + header = reader.read_boxes() + break + elif tbox == b"ftyp": + if reader.read_fields(">4s")[0] == b"jpx ": + mimetype = "image/jpx" + + size = None + mode = None + bpc = None + nc = None + dpi = None # 2-tuple of DPI info, or None + + while header.has_next_box(): + tbox = header.next_box_type() + + if tbox == b"ihdr": + height, width, nc, bpc = header.read_fields(">IIHB") + size = (width, height) + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" + elif nc == 1: + mode = "L" + elif nc == 2: + mode = "LA" + elif nc == 3: + mode = "RGB" + elif nc == 4: + mode = "RGBA" + elif tbox == b"res ": + res = header.read_boxes() + while res.has_next_box(): + tres = res.next_box_type() + if tres == b"resc": + vrcn, vrcd, hrcn, hrcd, vrce, hrce = res.read_fields(">HHHHBB") + hres = _res_to_dpi(hrcn, hrcd, hrce) + vres = _res_to_dpi(vrcn, vrcd, vrce) + if hres is not None and vres is not None: + dpi = (hres, vres) + break + + if size is None or mode is None: + raise SyntaxError("Malformed JP2 header") + + return size, mode, mimetype, dpi + 
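+
+# Example (sketch): the header parsed above is what Image.open() exposes
+# for a JP2 file ("scan.jp2" is a placeholder):
+#
+#   from PIL import Image
+#
+#   with Image.open("scan.jp2") as im:
+#       print(im.size, im.mode, im.info.get("dpi"))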
+ +## +# Image plugin for JPEG2000 images. + + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self): + sig = self.fp.read(4) + if sig == b"\xff\x4f\xff\x51": + self.codec = "j2k" + self._size, self.mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a": + self.codec = "jp2" + header = _parse_jp2_header(self.fp) + self._size, self.mode, self.custom_mimetype, dpi = header + if dpi is not None: + self.info["dpi"] = dpi + else: + raise SyntaxError("not a JPEG 2000 file") + + if self.size is None or self.mode is None: + raise SyntaxError("unable to determine size/mode") + + self._reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except Exception: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, io.SEEK_END) + length = self.fp.tell() + self.fp.seek(pos) + except Exception: + length = -1 + + self.tile = [ + ( + "jpeg2k", + (0, 0) + self.size, + 0, + (self.codec, self._reduce, self.layers, fd, length), + ) + ] + + @property + def reduce(self): + # https://github.com/python-pillow/Pillow/issues/4343 found that the + # new Image 'reduce' method was shadowed by this plugin's 'reduce' + # property. This attempts to allow for both scenarios + return self._reduce or super().reduce + + @reduce.setter + def reduce(self, value): + self._reduce = value + + def load(self): + if self.tile and self._reduce: + power = 1 << self._reduce + adjust = power >> 1 + self._size = ( + int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power), + ) + + # Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + return ImageFile.ImageFile.load(self) + + +def _accept(prefix): + return ( + prefix[:4] == b"\xff\x4f\xff\x51" + or prefix[:12] == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ) + + +# ------------------------------------------------------------ +# Save support + + +def _save(im, fp, filename): + # Get the keyword arguments + info = im.encoderinfo + + if filename.endswith(".j2k") or info.get("no_jp2", False): + kind = "j2k" + else: + kind = "jp2" + + offset = info.get("offset", None) + tile_offset = info.get("tile_offset", None) + tile_size = info.get("tile_size", None) + quality_mode = info.get("quality_mode", "rates") + quality_layers = info.get("quality_layers", None) + if quality_layers is not None and not ( + isinstance(quality_layers, (list, tuple)) + and all( + [ + isinstance(quality_layer, (int, float)) + for quality_layer in quality_layers + ] + ) + ): + raise ValueError("quality_layers must be a sequence of numbers") + + num_resolutions = info.get("num_resolutions", 0) + cblk_size = info.get("codeblock_size", None) + precinct_size = info.get("precinct_size", None) + irreversible = info.get("irreversible", False) + progression = info.get("progression", "LRCP") + cinema_mode = info.get("cinema_mode", "no") + mct = info.get("mct", 0) + fd = -1 + + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except Exception: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + mct, + fd, + ) + + ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)]) + + +# 
------------------------------------------------------------ +# Registry stuff + + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) + +Image.register_extensions( + Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"] +) + +Image.register_mime(Jpeg2KImageFile.format, "image/jp2") diff --git a/sbsheriff/Lib/site-packages/PIL/JpegImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/JpegImagePlugin.py new file mode 100644 index 0000000..a6ed223 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/JpegImagePlugin.py @@ -0,0 +1,827 @@ +# +# The Python Imaging Library. +# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +import array +import io +import math +import os +import struct +import subprocess +import sys +import tempfile +import warnings + +from . import Image, ImageFile, TiffImagePlugin +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from ._deprecate import deprecate +from .JpegPresets import presets + +# +# Parser + + +def Skip(self, marker): + n = i16(self.fp.read(2)) - 2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. 
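+    # Each APPn payload follows a 2-byte big-endian length that includes
+    # itself (hence the "- 2" below); the well-known payloads matched here
+    # start with b"JFIF", b"Exif\0", b"FPXR\0", b"ICC_PROFILE\0",
+    # b"Photoshop 3.0\0", b"Adobe" or b"MPF\0".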
+ + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = s[7] + jfif_density = i16(s, 8), i16(s, 10) + except Exception: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:5] == b"Exif\0": + if "exif" not in self.info: + # extract EXIF information (incomplete) + self.info["exif"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. + self.icclist.append(s) + elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00": + # parse the image resource block + offset = 14 + photoshop = self.info.setdefault("photoshop", {}) + while s[offset : offset + 4] == b"8BIM": + try: + offset += 4 + # resource code + code = i16(s, offset) + offset += 2 + # resource name (usually empty) + name_len = s[offset] + # name = s[offset+1:offset+1+name_len] + offset += 1 + name_len + offset += offset & 1 # align + # resource data block + size = i32(s, offset) + offset += 4 + data = s[offset : offset + size] + if code == 0x03ED: # ResolutionInfo + data = { + "XResolution": i32(data, 0) / 65536, + "DisplayedUnitsX": i16(data, 4), + "YResolution": i32(data, 8) / 65536, + "DisplayedUnitsY": i16(data, 12), + } + photoshop[code] = data + offset += size + offset += offset & 1 # align + except struct.error: + break # insufficient data + + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = s[11] + except IndexError: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + # If DPI isn't in JPEG header, fetch from EXIF + if "dpi" not in self.info and "exif" in self.info: + try: + exif = self.getexif() + resolution_unit = exif[0x0128] + x_resolution = exif[0x011A] + try: + dpi = float(x_resolution[0]) / x_resolution[1] + except TypeError: + dpi = x_resolution + if math.isnan(dpi): + raise ValueError + if resolution_unit == 3: # cm + # 1 dpcm = 2.54 dpi + dpi *= 2.54 + self.info["dpi"] = dpi, dpi + except (TypeError, KeyError, SyntaxError, ValueError, ZeroDivisionError): + # SyntaxError for invalid/unreadable EXIF + # KeyError for dpi not included + # 
ZeroDivisionError for invalid dpi rational value + # ValueError or TypeError for dpi being an invalid float + self.info["dpi"] = 72, 72 + + +def COM(self, marker): + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + + self.info["comment"] = s + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self, marker): + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. + + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + self._size = i16(s, 3), i16(s, 1) + + self.bits = s[0] + if self.bits != 8: + raise SyntaxError(f"cannot handle {self.bits}-bit layers") + + self.layers = s[5] + if self.layers == 1: + self.mode = "L" + elif self.layers == 3: + self.mode = "RGB" + elif self.layers == 4: + self.mode = "CMYK" + else: + raise SyntaxError(f"cannot handle {self.layers}-layer images") + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if self.icclist[0][13] == len(self.icclist): + profile = [] + for p in self.icclist: + profile.append(p[14:]) + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = [] + + for i in range(6, len(s), 3): + t = s[i : i + 3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], t[1] // 16, t[1] & 15, t[2])) + + +def DQT(self, marker): + # + # Define quantization table. Note that there might be more + # than one table in each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. 
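+    # Payload layout per table: one byte whose high nibble selects 8- or
+    # 16-bit precision and whose low nibble is the table id, then 64
+    # quantization values (1 or 2 bytes each) in zigzag order.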
+ + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + v = s[0] + precision = 1 if (v // 16 == 0) else 2 # in bytes + qt_length = 1 + precision * 64 + if len(s) < qt_length: + raise SyntaxError("bad quantization table marker") + data = array.array("B" if precision == 1 else "H", s[1:qt_length]) + if sys.byteorder == "little" and precision > 1: + data.byteswap() # the values are always big-endian + self.quantization[v & 15] = [data[i] for i in zigzag_index] + s = s[qt_length:] + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), + 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM), +} + + +def 
_accept(prefix): + # Magic number was taken from https://en.wikipedia.org/wiki/JPEG + return prefix[:3] == b"\xFF\xD8\xFF" + + +## +# Image plugin for JPEG and JFIF images. + + +class JpegImageFile(ImageFile.ImageFile): + + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + + s = self.fp.read(3) + + if not _accept(s): + raise SyntaxError("not a JPEG file") + s = b"\xFF" + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + + i = s[0] + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i == 0 or i == 0xFFFF: + # padded marker or junk; move on + s = b"\xff" + elif i == 0xFF00: # Skip extraneous data (escaped 0xFF) + s = self.fp.read(1) + else: + raise SyntaxError("no marker found") + + def load_read(self, read_bytes): + """ + internal: read more image data + For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker + so libjpeg can finish decoding + """ + s = self.fp.read(read_bytes) + + if not s and ImageFile.LOAD_TRUNCATED_IMAGES and not hasattr(self, "_ended"): + # Premature EOF. + # Pretend file is finished adding EOI marker + self._ended = True + return b"\xFF\xD9" + + return s + + def draft(self, mode, size): + + if len(self.tile) != 1: + return + + # Protect from second call + if self.decoderconfig: + return + + d, e, o, a = self.tile[0] + scale = 1 + original_size = self.size + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self.mode = mode + a = mode, "" + + if size: + scale = min(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = ( + e[0], + e[1], + (e[2] - e[0] + s - 1) // s + e[0], + (e[3] - e[1] + s - 1) // s + e[1], + ) + self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + box = (0, 0, original_size[0] / scale, original_size[1] / scale) + return self.mode, box + + def load_djpeg(self): + + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + raise ValueError("Invalid Filename") + + try: + with Image.open(path) as _im: + _im.load() + self.im = _im.im + finally: + try: + os.unlink(path) + except OSError: + pass + + self.mode = self.im.mode + self._size = self.im.size + + self.tile = [] + + def _getexif(self): + return _getexif(self) + + def _getmp(self): + return _getmp(self) + + def getxmp(self): + """ + Returns a dictionary containing the XMP tags. + Requires defusedxml to be installed. + + :returns: XMP tags in a dictionary. 
+ """ + + for segment, content in self.applist: + if segment == "APP1": + marker, xmp_tags = content.rsplit(b"\x00", 1) + if marker == b"http://ns.adobe.com/xap/1.0/": + return self._getxmp(xmp_tags) + return {} + + +def _getexif(self): + if "exif" not in self.info: + return None + return self.getexif()._get_merged_dict() + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plugin. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. + try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<" + # process dictionary + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + file_contents.seek(info.next) + info.load(file_contents) + mp = dict(info) + except Exception as e: + raise SyntaxError("malformed MP Index (unreadable directory)") from e + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError as e: + raise SyntaxError("malformed MP Index (no number of images)") from e + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = struct.unpack_from( + f"{endianness}LLLHH", rawmpentries, entrynum * 16 + ) + labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2") + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)), + "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)), + "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)), + "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27, + "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24, + "MPType": mpentry["Attribute"] & 0x00FFFFFF, + } + if mpentryattr["ImageDataFormat"] == 0: + mpentryattr["ImageDataFormat"] = "JPEG" + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: "Undefined", + 0x010001: "Large Thumbnail (VGA Equivalent)", + 0x010002: "Large Thumbnail (Full HD Equivalent)", + 0x020001: "Multi-Frame Image (Panorama)", + 0x020002: "Multi-Frame Image: (Disparity)", + 0x020003: "Multi-Frame Image: (Multi-Angle)", + 0x030000: "Baseline MP Primary Image", + } + mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown") + mpentry["Attribute"] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError as e: + raise SyntaxError("malformed MP Index (bad MP Entry)") from e + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. 
+ return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +# fmt: off +zigzag_index = ( + 0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63, +) + +samplings = { + (1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, +} +# fmt: on + + +def convert_dict_qtables(qtables): + deprecate("convert_dict_qtables", 10, action="Conversion is no longer needed") + return qtables + + +def get_sampling(im): + # There's no subsampling when images have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to the default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. + if not hasattr(im, "layers") or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im, fp, filename): + if im.width == 0 or im.height == 0: + raise ValueError("cannot write empty image as JPEG") + + try: + rawmode = RAWMODE[im.mode] + except KeyError as e: + raise OSError(f"cannot write mode {im.mode} as JPEG") from e + + info = im.encoderinfo + + dpi = [round(x) for x in info.get("dpi", (0, 0))] + + quality = info.get("quality", -1) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = -1 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = -1 + subsampling = preset.get("subsampling", -1) + qtables = preset.get("quantization") + elif not isinstance(quality, int): + raise ValueError("Invalid quality setting") + else: + if subsampling in presets: + subsampling = presets[subsampling].get("subsampling", -1) + if isinstance(qtables, str) and qtables in presets: + qtables = presets[qtables].get("quantization") + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:2:0": + subsampling = 2 + elif subsampling == "4:1:1": + # For compatibility. Before Pillow 4.3, 4:1:1 actually meant 4:2:0. + # Set 4:2:0 if someone is still using that value. 
+ subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + raise ValueError("Cannot use 'keep' when original image is not a JPEG") + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isinstance(qtables, str): + try: + lines = [ + int(num) + for line in qtables.splitlines() + for num in line.split("#", 1)[0].split() + ] + except ValueError as e: + raise ValueError("Invalid quantization table") from e + else: + qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = [ + qtables[key] for key in range(len(qtables)) if key in qtables + ] + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + raise ValueError("None or too many quantization tables") + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + raise TypeError + table = array.array("H", table) + except TypeError as e: + raise ValueError("Invalid quantization table") from e + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + raise ValueError("Cannot use 'keep' when original image is not a JPEG") + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = info.get("extra", b"") + + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_BYTES_IN_MARKER = 65533 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) + extra += ( + b"\xFF\xE2" + + size + + b"ICC_PROFILE\0" + + o8(i) + + o8(len(markers)) + + marker + ) + i += 1 + + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + progressive = info.get("progressive", False) or info.get("progression", False) + + optimize = info.get("optimize", False) + + exif = info.get("exif", b"") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + + # get keyword arguments + im.encoderconfig = ( + quality, + progressive, + info.get("smooth", 0), + optimize, + info.get("streamtype", 0), + dpi[0], + dpi[1], + subsampling, + qtables, + extra, + exif, + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pixel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if optimize or progressive: + # CMYK can be bigger + if im.mode == "CMYK": + bufsize = 4 * im.size[0] * im.size[1] + # keep sets quality to -1, but the actual value may be high. + elif quality >= 95 or quality == -1: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + + # The EXIF info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough. Same with the icc_profile block. + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(exif) + 5, len(extra) + 1) + + ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im, fp, filename): + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. 
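+    # Roughly the shell equivalent of the call below, with hypothetical
+    # file names:  cjpeg -outfile out.jpg /tmp/pil-dump.ppm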
+    tempfile = im._dump()
+    subprocess.check_call(["cjpeg", "-outfile", filename, tempfile])
+    try:
+        os.unlink(tempfile)
+    except OSError:
+        pass
+
+
+##
+# Factory for making JPEG and MPO instances
+def jpeg_factory(fp=None, filename=None):
+    im = JpegImageFile(fp, filename)
+    try:
+        mpheader = im._getmp()
+        if mpheader[45057] > 1:
+            # It's actually an MPO
+            from .MpoImagePlugin import MpoImageFile
+
+            # Don't reload everything, just convert it.
+            im = MpoImageFile.adopt(im, mpheader)
+    except (TypeError, IndexError):
+        # It is really a JPEG
+        pass
+    except SyntaxError:
+        warnings.warn(
+            "Image appears to be a malformed MPO file, it will be "
+            "interpreted as a base JPEG file"
+        )
+    return im
+
+
+# ---------------------------------------------------------------------
+# Registry stuff
+
+Image.register_open(JpegImageFile.format, jpeg_factory, _accept)
+Image.register_save(JpegImageFile.format, _save)
+
+Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"])
+
+Image.register_mime(JpegImageFile.format, "image/jpeg")
diff --git a/sbsheriff/Lib/site-packages/PIL/JpegPresets.py b/sbsheriff/Lib/site-packages/PIL/JpegPresets.py
new file mode 100644
index 0000000..a678e24
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/JpegPresets.py
@@ -0,0 +1,240 @@
+"""
+JPEG quality settings equivalent to the Photoshop settings.
+Can be used when saving JPEG files.
+
+The following presets are available by default:
+``web_low``, ``web_medium``, ``web_high``, ``web_very_high``, ``web_maximum``,
+``low``, ``medium``, ``high``, ``maximum``.
+More presets can be added to the :py:data:`presets` dict if needed.
+
+To apply the preset, specify::
+
+    quality="preset_name"
+
+To apply only the quantization table::
+
+    qtables="preset_name"
+
+To apply only the subsampling setting::
+
+    subsampling="preset_name"
+
+Example::
+
+    im.save("image_name.jpg", quality="web_high")
+
+Subsampling
+-----------
+
+Subsampling is the practice of encoding images by implementing less resolution
+for chroma information than for luma information.
+(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling)
+
+Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and
+4:2:0.
+
+You can get the subsampling of a JPEG with the
+:func:`.JpegImagePlugin.get_sampling` function.
+
+In JPEG compressed data a JPEG marker is used instead of an EXIF tag.
+(ref.: https://exiv2.org/tags.html)
+
+
+Quantization tables
+-------------------
+
+They are values used by the DCT (Discrete cosine transform) to remove
+*unnecessary* information from the image (the lossy part of the compression).
+(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices,
+https://en.wikipedia.org/wiki/JPEG#Quantization)
+
+You can get the quantization tables of a JPEG with::
+
+    im.quantization
+
+This will return a dict with a number of lists. You can pass this dict
+directly as the qtables argument when saving a JPEG.
+
+The quantization table format in presets is a list with sublists. These formats
+are interchangeable.
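+
+For instance, to combine a preset's quantization tables with explicit 4:4:4
+subsampling (an illustrative sketch; ``im`` is any RGB image)::
+
+    im.save("out.jpg", qtables="web_high", subsampling=0)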
+ +Libjpeg ref.: +https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +# fmt: off +presets = { + 'web_low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 
12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} +# fmt: on diff --git a/sbsheriff/Lib/site-packages/PIL/McIdasImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000..cd047fe --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/McIdasImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. +# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +import struct + +from . import Image, ImageFile + + +def _accept(s): + return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. 
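+#
+# For orientation: the 256-byte directory read in _open() below is unpacked
+# into 64 big-endian ints with a 0 prepended, so (assuming the usual 1-based
+# word numbering of the area-file docs) w[9] is lines, w[10] is elements per
+# line and w[11] is bytes per element.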
+ + +class McIdasImageFile(ImageFile.ImageFile): + + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self): + + # parse area file directory + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + raise SyntaxError("not an McIdas area file") + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + raise SyntaxError("unsupported McIdas format") + + self.mode = mode + self._size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10] * w[11] * w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/sbsheriff/Lib/site-packages/PIL/MicImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/MicImagePlugin.py new file mode 100644 index 0000000..d4f6c90 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/MicImagePlugin.py @@ -0,0 +1,97 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +import olefile + +from . import Image, TiffImagePlugin + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. + + +class MicImageFile(TiffImagePlugin.TiffImageFile): + + format = "MIC" + format_description = "Microsoft Image Composer" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = olefile.OleFileIO(self.fp) + except OSError as e: + raise SyntaxError("not an MIC file; invalid OLE file") from e + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [] + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) + + # if we didn't find any images, this is probably not + # an MIC file. 
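+        # Illustrative shape of self.images at this point, with hypothetical
+        # stream names:
+        #   [["Sprite 01.ACI", "Image"], ["Sprite 02.ACI", "Image"]]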
+ if not self.images: + raise SyntaxError("not an MIC file; no image entries") + + self.frame = None + self._n_frames = len(self.images) + self.is_animated = self._n_frames > 1 + + if len(self.images) > 1: + self._category = Image.CONTAINER + + self.seek(0) + + def seek(self, frame): + if not self._seek_check(frame): + return + try: + filename = self.images[frame] + except IndexError as e: + raise EOFError("no such frame") from e + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self): + return self.frame + + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/sbsheriff/Lib/site-packages/PIL/MpegImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/MpegImagePlugin.py new file mode 100644 index 0000000..a358dfd --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/MpegImagePlugin.py @@ -0,0 +1,83 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile +from ._binary import i8 + +# +# Bitstream parser + + +class BitStream: + def __init__(self, fp): + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self): + return i8(self.fp.read(1)) + + def peek(self, bits): + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits): + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits): + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. + + +class MpegImageFile(ImageFile.ImageFile): + + format = "MPEG" + format_description = "MPEG" + + def _open(self): + + s = BitStream(self.fp) + + if s.read(32) != 0x1B3: + raise SyntaxError("not an MPEG file") + + self.mode = "RGB" + self._size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile) + +Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"]) + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/sbsheriff/Lib/site-packages/PIL/MpoImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..5bfd8ef --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/MpoImagePlugin.py @@ -0,0 +1,181 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. +# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. 
+#
+
+import itertools
+import os
+import struct
+
+from . import Image, ImageFile, ImageSequence, JpegImagePlugin, TiffImagePlugin
+from ._binary import i16be as i16
+from ._binary import o32le
+
+# def _accept(prefix):
+#     return JpegImagePlugin._accept(prefix)
+
+
+def _save(im, fp, filename):
+    JpegImagePlugin._save(im, fp, filename)
+
+
+def _save_all(im, fp, filename):
+    append_images = im.encoderinfo.get("append_images", [])
+    if not append_images:
+        try:
+            animated = im.is_animated
+        except AttributeError:
+            animated = False
+        if not animated:
+            _save(im, fp, filename)
+            return
+
+    offsets = []
+    for imSequence in itertools.chain([im], append_images):
+        for im_frame in ImageSequence.Iterator(imSequence):
+            if not offsets:
+                # APP2 marker
+                im.encoderinfo["extra"] = (
+                    b"\xFF\xE2" + struct.pack(">H", 6 + 70) + b"MPF\0" + b" " * 70
+                )
+                JpegImagePlugin._save(im_frame, fp, filename)
+                offsets.append(fp.tell())
+            else:
+                im_frame.save(fp, "JPEG")
+                offsets.append(fp.tell() - offsets[-1])
+
+    ifd = TiffImagePlugin.ImageFileDirectory_v2()
+    ifd[0xB001] = len(offsets)
+
+    mpentries = b""
+    data_offset = 0
+    for i, size in enumerate(offsets):
+        if i == 0:
+            mptype = 0x030000  # Baseline MP Primary Image
+        else:
+            mptype = 0x000000  # Undefined
+        mpentries += struct.pack("<LLLHH", mptype, size, data_offset, 0, 0)
+        if i == 0:
+            data_offset -= 28
+        data_offset += size
+    ifd[0xB002] = mpentries
+
+    fp.seek(28)
+    fp.write(b"II\x2A\x00" + o32le(8) + ifd.tobytes(8))
+    fp.seek(0, os.SEEK_END)
+
+
+class MpoImageFile(JpegImagePlugin.JpegImageFile):
+
+    format = "MPO"
+    format_description = "MPO (CIPA DC-007)"
+    _close_exclusive_fp_after_loading = False
+
+    def _open(self):
+        self.fp.seek(0)  # prep the fp in order to pass the JPEG test
+        JpegImagePlugin.JpegImageFile._open(self)
+        self._after_jpeg_open()
+
+    def _after_jpeg_open(self, mpheader=None):
+        self._initial_size = self.size
+        self.mpinfo = mpheader if mpheader is not None else self._getmp()
+        self.n_frames = self.mpinfo[0xB001]
+        self.__mpoffsets = [
+            mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002]
+        ]
+        self.__mpoffsets[0] = 0
+        # Note that the following assertion will only be invalid if something
+        # gets broken within JpegImagePlugin.
+        assert self.n_frames == len(self.__mpoffsets)
+        del self.info["mpoffset"]  # no longer needed
+        self.is_animated = self.n_frames > 1
+        self._fp = self.fp  # FIXME: hack
+        self._fp.seek(self.__mpoffsets[0])  # get ready to read first frame
+        self.__frame = 0
+        self.offset = 0
+        # for now we can only handle reading and individual frame extraction
+        self.readonly = 1
+
+    def load_seek(self, pos):
+        self._fp.seek(pos)
+
+    def seek(self, frame):
+        if not self._seek_check(frame):
+            return
+        self.fp = self._fp
+        self.offset = self.__mpoffsets[frame]
+
+        self.fp.seek(self.offset + 2)  # skip SOI marker
+        segment = self.fp.read(2)
+        if not segment:
+            raise ValueError("No data found for frame")
+        self._size = self._initial_size
+        if i16(segment) == 0xFFE1:  # APP1
+            n = i16(self.fp.read(2)) - 2
+            self.info["exif"] = ImageFile._safe_read(self.fp, n)
+            self._reload_exif()
+
+            mptype = self.mpinfo[0xB002][frame]["Attribute"]["MPType"]
+            if mptype.startswith("Large Thumbnail"):
+                exif = self.getexif().get_ifd(0x8769)
+                if 40962 in exif and 40963 in exif:
+                    self._size = (exif[40962], exif[40963])
+        elif "exif" in self.info:
+            del self.info["exif"]
+            self._reload_exif()
+
+        self.tile = [("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))]
+        self.__frame = frame
+
+    def tell(self):
+        return self.__frame
+
+    @staticmethod
+    def adopt(jpeg_instance, mpheader=None):
+        """
+        Transform the instance of JpegImageFile into
+        an instance of MpoImageFile.
+        After the call, the JpegImageFile is extended
+        to be an MpoImageFile.
+
+        This is essentially useful when opening a JPEG
+        file that reveals itself as an MPO, to avoid
+        double call to _open.
+        """
+        jpeg_instance.__class__ = MpoImageFile
+        jpeg_instance._after_jpeg_open(mpheader)
+        return jpeg_instance
+
+
+# ---------------------------------------------------------------------
+# Registry stuff
+
+# Note that since MPO shares a factory with JPEG, we do not need to do a
+# separate registration for it here.
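+# A minimal reading sketch for illustration; "stereo.mpo" stands in for a
+# hypothetical two-frame file:
+#
+#     from PIL import Image
+#     with Image.open("stereo.mpo") as im:  # jpeg_factory promotes it to MPO
+#         for frame in range(im.n_frames):
+#             im.seek(frame)
+#             im.load()
+#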
+# Image.register_open(MpoImageFile.format,
+#                     JpegImagePlugin.jpeg_factory, _accept)
+Image.register_save(MpoImageFile.format, _save)
+Image.register_save_all(MpoImageFile.format, _save_all)
+
+Image.register_extension(MpoImageFile.format, ".mpo")
+
+Image.register_mime(MpoImageFile.format, "image/mpo")
diff --git a/sbsheriff/Lib/site-packages/PIL/MspImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/MspImagePlugin.py
new file mode 100644
index 0000000..c4d7ddb
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/MspImagePlugin.py
@@ -0,0 +1,194 @@
+#
+# The Python Imaging Library.
+#
+# MSP file handling
+#
+# This is the format used by the Paint program in Windows 1 and 2.
+#
+# History:
+# 95-09-05 fl Created
+# 97-01-03 fl Read/write MSP images
+# 17-02-21 es Fixed RLE interpretation
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995-97.
+# Copyright (c) Eric Soroos 2017.
+#
+# See the README file for information on usage and redistribution.
+#
+# More info on this format: https://archive.org/details/gg243631
+# Page 313:
+# Figure 205. Windows Paint Version 1: "DanM" Format
+# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03
+#
+# See also: https://www.fileformat.info/format/mspaint/egff.htm
+
+import io
+import struct
+
+from . import Image, ImageFile
+from ._binary import i16le as i16
+from ._binary import o16le as o16
+
+#
+# read MSP files
+
+
+def _accept(prefix):
+    return prefix[:4] in [b"DanM", b"LinS"]
+
+
+##
+# Image plugin for Windows MSP images. This plugin supports both
+# uncompressed ("DanM", Windows 1.0) and RLE-compressed ("LinS",
+# Windows 2.0) images.
+
+
+class MspImageFile(ImageFile.ImageFile):
+
+    format = "MSP"
+    format_description = "Windows Paint"
+
+    def _open(self):
+
+        # Header
+        s = self.fp.read(32)
+        if not _accept(s):
+            raise SyntaxError("not an MSP file")
+
+        # Header checksum
+        checksum = 0
+        for i in range(0, 32, 2):
+            checksum = checksum ^ i16(s, i)
+        if checksum != 0:
+            raise SyntaxError("bad MSP checksum")
+
+        self.mode = "1"
+        self._size = i16(s, 4), i16(s, 6)
+
+        if s[:4] == b"DanM":
+            self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))]
+        else:
+            self.tile = [("MSP", (0, 0) + self.size, 32, None)]
+
+
+class MspDecoder(ImageFile.PyDecoder):
+    # The algorithm for the MSP decoder is from
+    # https://www.fileformat.info/format/mspaint/egff.htm
+    # cc-by-attribution -- the material that page references is taken from
+    # the Encyclopedia of Graphics File Formats and is licensed by
+    # O'Reilly under the Creative Commons Attribution license
+    #
+    # For RLE encoded files, the 32-byte header is followed by a scan
+    # line map, encoded as one 16-bit word of encoded byte length per
+    # line.
+    #
+    # NOTE: the encoded length of a line can be 0. This was not
+    # handled in the previous version of this decoder, and there is no
+    # mention of how to handle it in the documentation. From the few
+    # examples I've seen, I've assumed that it is a fill of the
+    # background color, in this case, white.
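+    # Worked through: for a 10-pixel-wide image, (10 + 7) // 8 == 2, so a
+    # zero-length row emits two 0xFF bytes -- one all-white packed row in
+    # mode "1".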
+ # + # + # Pseudocode of the decoder: + # Read a BYTE value as the RunType + # If the RunType value is zero + # Read next byte as the RunCount + # Read the next byte as the RunValue + # Write the RunValue byte RunCount times + # If the RunType value is non-zero + # Use this value as the RunCount + # Read and write the next RunCount bytes literally + # + # e.g.: + # 0x00 03 ff 05 00 01 02 03 04 + # would yield the bytes: + # 0xff ff ff 00 01 02 03 04 + # + # which are then interpreted as a bit packed mode '1' image + + _pulls_fd = True + + def decode(self, buffer): + + img = io.BytesIO() + blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8)) + try: + self.fd.seek(32) + rowmap = struct.unpack_from( + f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2) + ) + except struct.error as e: + raise OSError("Truncated MSP file in row map") from e + + for x, rowlen in enumerate(rowmap): + try: + if rowlen == 0: + img.write(blank_line) + continue + row = self.fd.read(rowlen) + if len(row) != rowlen: + raise OSError( + "Truncated MSP file, expected %d bytes on row %s", (rowlen, x) + ) + idx = 0 + while idx < rowlen: + runtype = row[idx] + idx += 1 + if runtype == 0: + (runcount, runval) = struct.unpack_from("Bc", row, idx) + img.write(runval * runcount) + idx += 2 + else: + runcount = runtype + img.write(row[idx : idx + runcount]) + idx += runcount + + except struct.error as e: + raise OSError(f"Corrupted MSP file in row {x}") from e + + self.set_as_raw(img.getvalue(), ("1", 0, 1)) + + return -1, 0 + + +Image.register_decoder("MSP", MspDecoder) + + +# +# write MSP files (uncompressed only) + + +def _save(im, fp, filename): + + if im.mode != "1": + raise OSError(f"cannot write mode {im.mode} as MSP") + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in header: + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))]) + + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/sbsheriff/Lib/site-packages/PIL/PSDraw.py b/sbsheriff/Lib/site-packages/PIL/PSDraw.py new file mode 100644 index 0000000..13b3048 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PSDraw.py @@ -0,0 +1,229 @@ +# +# The Python Imaging Library +# $Id$ +# +# Simple PostScript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import sys + +from . import EpsImagePlugin + +## +# Simple PostScript graphics interface. + + +class PSDraw: + """ + Sets up printing to the given file. If ``fp`` is omitted, + ``sys.stdout.buffer`` or ``sys.stdout`` is assumed. + """ + + def __init__(self, fp=None): + if not fp: + try: + fp = sys.stdout.buffer + except AttributeError: + fp = sys.stdout + self.fp = fp + + def begin_document(self, id=None): + """Set up printing of a document. 
(Write PostScript DSC header.)""" + # FIXME: incomplete + self.fp.write( + b"%!PS-Adobe-3.0\n" + b"save\n" + b"/showpage { } def\n" + b"%%EndComments\n" + b"%%BeginDocument\n" + ) + # self.fp.write(ERROR_PS) # debugging! + self.fp.write(EDROFF_PS) + self.fp.write(VDI_PS) + self.fp.write(b"%%EndProlog\n") + self.isofont = {} + + def end_document(self): + """Ends printing. (Write PostScript DSC footer.)""" + self.fp.write(b"%%EndDocument\nrestore showpage\n%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font, size): + """ + Selects which font to use. + + :param font: A PostScript font name + :param size: Size in points. + """ + font = bytes(font, "UTF-8") + if font not in self.isofont: + # reencode font + self.fp.write(b"/PSDraw-%s ISOLatin1Encoding /%s E\n" % (font, font)) + self.isofont[font] = 1 + # rough + self.fp.write(b"/F0 %d /PSDraw-%s F\n" % (size, font)) + + def line(self, xy0, xy1): + """ + Draws a line between the two points. Coordinates are given in + PostScript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + self.fp.write(b"%d %d %d %d Vl\n" % (*xy0, *xy1)) + + def rectangle(self, box): + """ + Draws a rectangle. + + :param box: A tuple of four integers, specifying left, bottom, width and + height. + """ + self.fp.write(b"%d %d M 0 %d %d Vr\n" % box) + + def text(self, xy, text): + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. + """ + text = bytes(text, "UTF-8") + text = b"\\(".join(text.split(b"(")) + text = b"\\)".join(text.split(b")")) + xy += (text,) + self.fp.write(b"%d %d M (%s) S\n" % xy) + + def image(self, box, im, dpi=None): + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # greyscale + # image size (on paper) + x = im.size[0] * 72 / dpi + y = im.size[1] * 72 / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self.fp.write(b"gsave\n%f %f translate\n" % (dx, dy)) + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self.fp.write(b"%f %f scale\n" % (sx, sy)) + EpsImagePlugin._save(im, self.fp, None, 0) + self.fp.write(b"\ngrestore\n") + + +# -------------------------------------------------------------------- +# PostScript driver + +# +# EDROFF.PS -- PostScript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. +# + + +EDROFF_PS = b"""\ +/S { show } bind def +/P { moveto show } bind def +/M { moveto } bind def +/X { 0 rmoveto } bind def +/Y { 0 exch rmoveto } bind def +/E { findfont + dup maxlength dict begin + { + 1 index /FID ne { def } { pop pop } ifelse + } forall + /Encoding exch def + dup /FontName exch def + currentdict end definefont pop +} bind def +/F { findfont exch scalefont dup setfont + [ exch /setfont cvx ] cvx bind def +} bind def +""" + +# +# VDI.PS -- PostScript driver for VDI meta commands +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
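+#
+# Typical PSDraw usage, sketched under the assumption of a writable binary
+# file object (output path hypothetical):
+#
+#     with open("out.ps", "wb") as fp:
+#         ps = PSDraw(fp)
+#         ps.begin_document()
+#         ps.setfont("Helvetica", 12)
+#         ps.text((72, 720), "hello")
+#         ps.end_document()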
+#
+
+VDI_PS = b"""\
+/Vm { moveto } bind def
+/Va { newpath arcn stroke } bind def
+/Vl { moveto lineto stroke } bind def
+/Vc { newpath 0 360 arc closepath } bind def
+/Vr { exch dup 0 rlineto
+      exch dup 0 exch rlineto
+      exch neg 0 rlineto
+      0 exch neg rlineto
+      setgray fill } bind def
+/Tm matrix def
+/Ve { Tm currentmatrix pop
+      translate scale newpath 0 0 .5 0 360 arc closepath
+      Tm setmatrix
+} bind def
+/Vf { currentgray exch setgray fill setgray } bind def
+"""

+#
+# ERROR.PS -- Error handler
+#
+# History:
+# 89-11-21 fl: created (pslist 1.10)
+#

+ERROR_PS = b"""\
+/landscape false def
+/errorBUF 200 string def
+/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def
+errordict begin /handleerror {
+    initmatrix /Courier findfont 10 scalefont setfont
+    newpath 72 720 moveto $error begin /newerror false def
+    (PostScript Error) show errorNL errorNL
+    (Error: ) show
+    /errorname load errorBUF cvs show errorNL errorNL
+    (Command: ) show
+    /command load dup type /stringtype ne { errorBUF cvs } if show
+    errorNL errorNL
+    (VMstatus: ) show
+    vmstatus errorBUF cvs show ( bytes available, ) show
+    errorBUF cvs show ( bytes used at level ) show
+    errorBUF cvs show errorNL errorNL
+    (Operand stack: ) show errorNL /ostack load {
+        dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+    } forall errorNL
+    (Execution stack: ) show errorNL /estack load {
+        dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+    } forall
+    end showpage
+} def end
+"""
diff --git a/sbsheriff/Lib/site-packages/PIL/PaletteFile.py b/sbsheriff/Lib/site-packages/PIL/PaletteFile.py
new file mode 100644
index 0000000..ee9dca8
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/PaletteFile.py
@@ -0,0 +1,53 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read simple, teragon-style palette files
+#
+# History:
+# 97-08-23 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#

+from ._binary import o8


+class PaletteFile:
+    """File handler for Teragon-style palette files."""
+
+    rawmode = "RGB"
+
+    def __init__(self, fp):
+
+        self.palette = [(i, i, i) for i in range(256)]
+
+        while True:
+
+            s = fp.readline()
+
+            if not s:
+                break
+            if s[:1] == b"#":
+                continue
+            if len(s) > 100:
+                raise SyntaxError("bad palette file")
+
+            v = [int(x) for x in s.split()]
+            try:
+                [i, r, g, b] = v
+            except ValueError:
+                [i, r] = v
+                g = b = r
+
+            if 0 <= i <= 255:
+                self.palette[i] = o8(r) + o8(g) + o8(b)
+
+        self.palette = b"".join(self.palette)
+
+    def getpalette(self):
+
+        return self.palette, self.rawmode
diff --git a/sbsheriff/Lib/site-packages/PIL/PalmImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PalmImagePlugin.py
new file mode 100644
index 0000000..700f10e
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/PalmImagePlugin.py
@@ -0,0 +1,227 @@
+#
+# The Python Imaging Library.
+# $Id$
+#

+##
+# Image plugin for Palm pixmap images (output only).
+##

+from .
import Image, ImageFile +from ._binary import o8 +from ._binary import o16be as o16b + +# fmt: off +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 
102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) +# fmt: on + + +# so build a prototype image to be used for palette resampling +def build_prototype_image(): + image = Image.new("L", (1, len(_Palm8BitColormapValues))) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata = () + for colormapValue in _Palm8BitColormapValues: + palettedata += colormapValue + palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues)) + image.putpalette(palettedata) + return image + + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000} + +_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00} + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + + +def _save(im, fp, filename): + + if im.mode == "P": + + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif im.mode == "L": + if im.encoderinfo.get("bpp") in (1, 2, 4): + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8 - bpp, maxval=(1 << bpp) - 1: maxval - (x >> shift) + ) + elif im.info.get("bpp") in (1, 2, 4): + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
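+            # Worked through for bpp == 2: maxval == 3, so a stored level
+            # x == 1 maps to 3 - (1 & 3) == 2, following Palm's
+            # white-(0)-to-black ramp.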
+ bpp = im.info["bpp"] + im = im.point(lambda x, maxval=(1 << bpp) - 1: maxval - (x & maxval)) + else: + raise OSError(f"cannot write mode {im.mode} as Palm") + + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "1": + + # monochrome -- write it inverted, as is the Palm standard + rawmode = "1;I" + bpp = 1 + version = 0 + + else: + + raise OSError(f"cannot write mode {im.mode} as Palm") + + # + # make sure image data is available + im.load() + + # write header + + cols = im.size[0] + rows = im.size[1] + + rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2 + transparent_index = 0 + compression_type = _COMPRESSION_TYPES["none"] + + flags = 0 + if im.mode == "P" and "custom-colormap" in im.info: + flags = flags & _FLAGS["custom-colormap"] + colormapsize = 4 * 256 + 2 + colormapmode = im.palette.mode + colormap = im.getdata().getpalette() + else: + colormapsize = 0 + + if "offset" in im.info: + offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 + else: + offset = 0 + + fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) + fp.write(o8(bpp)) + fp.write(o8(version)) + fp.write(o16b(offset)) + fp.write(o8(transparent_index)) + fp.write(o8(compression_type)) + fp.write(o16b(0)) # reserved by Palm + + # now write colormap if necessary + + if colormapsize > 0: + fp.write(o16b(256)) + for i in range(256): + fp.write(o8(i)) + if colormapmode == "RGB": + fp.write( + o8(colormap[3 * i]) + + o8(colormap[3 * i + 1]) + + o8(colormap[3 * i + 2]) + ) + elif colormapmode == "RGBA": + fp.write( + o8(colormap[4 * i]) + + o8(colormap[4 * i + 1]) + + o8(colormap[4 * i + 2]) + ) + + # now convert data to raw form + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))]) + + if hasattr(fp, "flush"): + fp.flush() + + +# +# -------------------------------------------------------------------- + +Image.register_save("Palm", _save) + +Image.register_extension("Palm", ".palm") + +Image.register_mime("Palm", "image/palm") diff --git a/sbsheriff/Lib/site-packages/PIL/PcdImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PcdImagePlugin.py new file mode 100644 index 0000000..38caf5c --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PcdImagePlugin.py @@ -0,0 +1,63 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCD file handling +# +# History: +# 96-05-10 fl Created +# 96-05-27 fl Added draft mode (128x192, 256x384) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile + +## +# Image plugin for PhotoCD images. This plugin only reads the 768x512 +# image from the file; higher resolutions are encoded in a proprietary +# encoding. + + +class PcdImageFile(ImageFile.ImageFile): + + format = "PCD" + format_description = "Kodak PhotoCD" + + def _open(self): + + # rough + self.fp.seek(2048) + s = self.fp.read(2048) + + if s[:4] != b"PCD_": + raise SyntaxError("not a PCD file") + + orientation = s[1538] & 3 + self.tile_post_rotate = None + if orientation == 1: + self.tile_post_rotate = 90 + elif orientation == 3: + self.tile_post_rotate = -90 + + self.mode = "RGB" + self._size = 768, 512 # FIXME: not correct for rotated images! 
+ self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)] + + def load_end(self): + if self.tile_post_rotate: + # Handle rotated PCDs + self.im = self.im.rotate(self.tile_post_rotate) + self._size = self.im.size + + +# +# registry + +Image.register_open(PcdImageFile.format, PcdImageFile) + +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/sbsheriff/Lib/site-packages/PIL/PcfFontFile.py b/sbsheriff/Lib/site-packages/PIL/PcfFontFile.py new file mode 100644 index 0000000..442ac70 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PcfFontFile.py @@ -0,0 +1,246 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library +# $Id$ +# +# portable compiled font file parser +# +# history: +# 1997-08-19 fl created +# 2003-09-13 fl fixed loading of unicode fonts +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import io + +from . import FontFile, Image +from ._binary import i8 +from ._binary import i16be as b16 +from ._binary import i16le as l16 +from ._binary import i32be as b32 +from ._binary import i32le as l32 + +# -------------------------------------------------------------------- +# declarations + +PCF_MAGIC = 0x70636601 # "\x01fcp" + +PCF_PROPERTIES = 1 << 0 +PCF_ACCELERATORS = 1 << 1 +PCF_METRICS = 1 << 2 +PCF_BITMAPS = 1 << 3 +PCF_INK_METRICS = 1 << 4 +PCF_BDF_ENCODINGS = 1 << 5 +PCF_SWIDTHS = 1 << 6 +PCF_GLYPH_NAMES = 1 << 7 +PCF_BDF_ACCELERATORS = 1 << 8 + +BYTES_PER_ROW = [ + lambda bits: ((bits + 7) >> 3), + lambda bits: ((bits + 15) >> 3) & ~1, + lambda bits: ((bits + 31) >> 3) & ~3, + lambda bits: ((bits + 63) >> 3) & ~7, +] + + +def sz(s, o): + return s[o : s.index(b"\0", o)] + + +class PcfFontFile(FontFile.FontFile): + """Font file plugin for the X11 PCF format.""" + + name = "name" + + def __init__(self, fp, charset_encoding="iso8859-1"): + + self.charset_encoding = charset_encoding + + magic = l32(fp.read(4)) + if magic != PCF_MAGIC: + raise SyntaxError("not a PCF file") + + super().__init__() + + count = l32(fp.read(4)) + self.toc = {} + for i in range(count): + type = l32(fp.read(4)) + self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) + + self.fp = fp + + self.info = self._load_properties() + + metrics = self._load_metrics() + bitmaps = self._load_bitmaps(metrics) + encoding = self._load_encoding() + + # + # create glyph structure + + for ch, ix in enumerate(encoding): + if ix is not None: + x, y, l, r, w, a, d, f = metrics[ix] + glyph = (w, 0), (l, d - y, x + l, d), (0, 0, x, y), bitmaps[ix] + self.glyph[ch] = glyph + + def _getformat(self, tag): + + format, size, offset = self.toc[tag] + + fp = self.fp + fp.seek(offset) + + format = l32(fp.read(4)) + + if format & 4: + i16, i32 = b16, b32 + else: + i16, i32 = l16, l32 + + return fp, format, i16, i32 + + def _load_properties(self): + + # + # font properties + + properties = {} + + fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) + + nprops = i32(fp.read(4)) + + # read property description + p = [] + for i in range(nprops): + p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) + if nprops & 3: + fp.seek(4 - (nprops & 3), io.SEEK_CUR) # pad + + data = fp.read(i32(fp.read(4))) + + for k, s, v in p: + k = sz(data, k) + if s: + v = sz(data, v) + properties[k] = v + + return properties + + def _load_metrics(self): + + # + # font metrics + + metrics = [] + + fp, format, i16, i32 = self._getformat(PCF_METRICS) + + append = metrics.append + + if (format & 
0xFF00) == 0x100: + + # "compressed" metrics + for i in range(i16(fp.read(2))): + left = i8(fp.read(1)) - 128 + right = i8(fp.read(1)) - 128 + width = i8(fp.read(1)) - 128 + ascent = i8(fp.read(1)) - 128 + descent = i8(fp.read(1)) - 128 + xsize = right - left + ysize = ascent + descent + append((xsize, ysize, left, right, width, ascent, descent, 0)) + + else: + + # "jumbo" metrics + for i in range(i32(fp.read(4))): + left = i16(fp.read(2)) + right = i16(fp.read(2)) + width = i16(fp.read(2)) + ascent = i16(fp.read(2)) + descent = i16(fp.read(2)) + attributes = i16(fp.read(2)) + xsize = right - left + ysize = ascent + descent + append((xsize, ysize, left, right, width, ascent, descent, attributes)) + + return metrics + + def _load_bitmaps(self, metrics): + + # + # bitmap data + + bitmaps = [] + + fp, format, i16, i32 = self._getformat(PCF_BITMAPS) + + nbitmaps = i32(fp.read(4)) + + if nbitmaps != len(metrics): + raise OSError("Wrong number of bitmaps") + + offsets = [] + for i in range(nbitmaps): + offsets.append(i32(fp.read(4))) + + bitmap_sizes = [] + for i in range(4): + bitmap_sizes.append(i32(fp.read(4))) + + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 + + bitmapsize = bitmap_sizes[padindex] + offsets.append(bitmapsize) + + data = fp.read(bitmapsize) + + pad = BYTES_PER_ROW[padindex] + mode = "1;R" + if bitorder: + mode = "1" + + for i in range(nbitmaps): + x, y, l, r, w, a, d, f = metrics[i] + b, e = offsets[i], offsets[i + 1] + bitmaps.append(Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x))) + + return bitmaps + + def _load_encoding(self): + fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) + + first_col, last_col = i16(fp.read(2)), i16(fp.read(2)) + first_row, last_row = i16(fp.read(2)), i16(fp.read(2)) + + i16(fp.read(2)) # default + + nencoding = (last_col - first_col + 1) * (last_row - first_row + 1) + + # map character code to bitmap index + encoding = [None] * min(256, nencoding) + + encoding_offsets = [i16(fp.read(2)) for _ in range(nencoding)] + + for i in range(first_col, len(encoding)): + try: + encoding_offset = encoding_offsets[ + ord(bytearray([i]).decode(self.charset_encoding)) + ] + if encoding_offset != 0xFFFF: + encoding[i] = encoding_offset + except UnicodeDecodeError: + # character is not supported in selected encoding + pass + + return encoding diff --git a/sbsheriff/Lib/site-packages/PIL/PcxImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PcxImagePlugin.py new file mode 100644 index 0000000..841c18a --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PcxImagePlugin.py @@ -0,0 +1,220 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCX file handling +# +# This format was originally used by ZSoft's popular PaintBrush +# program for the IBM PC. It is also supported by many MS-DOS and +# Windows applications, including the Windows PaintBrush program in +# Windows 3. +# +# history: +# 1995-09-01 fl Created +# 1996-05-20 fl Fixed RGB support +# 1997-01-03 fl Fixed 2-bit and 4-bit support +# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) +# 1999-02-07 fl Added write support +# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust +# 2002-07-30 fl Seek from to current position, not beginning of file +# 2003-06-03 fl Extract DPI settings (info["dpi"]) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import io +import logging + +from . 
import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +logger = logging.getLogger(__name__) + + +def _accept(prefix): + return prefix[0] == 10 and prefix[1] in [0, 2, 3, 5] + + +## +# Image plugin for Paintbrush images. + + +class PcxImageFile(ImageFile.ImageFile): + + format = "PCX" + format_description = "Paintbrush" + + def _open(self): + + # header + s = self.fp.read(128) + if not _accept(s): + raise SyntaxError("not a PCX file") + + # image + bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1 + if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: + raise SyntaxError("bad PCX image size") + logger.debug("BBox: %s %s %s %s", *bbox) + + # format + version = s[1] + bits = s[3] + planes = s[65] + provided_stride = i16(s, 66) + logger.debug( + "PCX version %s, bits %s, planes %s, stride %s", + version, + bits, + planes, + provided_stride, + ) + + self.info["dpi"] = i16(s, 12), i16(s, 14) + + if bits == 1 and planes == 1: + mode = rawmode = "1" + + elif bits == 1 and planes in (2, 4): + mode = "P" + rawmode = "P;%dL" % planes + self.palette = ImagePalette.raw("RGB", s[16:64]) + + elif version == 5 and bits == 8 and planes == 1: + mode = rawmode = "L" + # FIXME: hey, this doesn't work with the incremental loader !!! + self.fp.seek(-769, io.SEEK_END) + s = self.fp.read(769) + if len(s) == 769 and s[0] == 12: + # check if the palette is linear greyscale + for i in range(256): + if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3: + mode = rawmode = "P" + break + if mode == "P": + self.palette = ImagePalette.raw("RGB", s[1:]) + self.fp.seek(128) + + elif version == 5 and bits == 8 and planes == 3: + mode = "RGB" + rawmode = "RGB;L" + + else: + raise OSError("unknown PCX mode") + + self.mode = mode + self._size = bbox[2] - bbox[0], bbox[3] - bbox[1] + + # Don't trust the passed in stride. + # Calculate the approximate position for ourselves. + # CVE-2020-35653 + stride = (self._size[0] * bits + 7) // 8 + + # While the specification states that this must be even, + # not all images follow this + if provided_stride != stride: + stride += stride % 2 + + bbox = (0, 0) + self.size + logger.debug("size: %sx%s", *self.size) + + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + + +# -------------------------------------------------------------------- +# save PCX files + + +SAVE = { + # mode: (version, bits, planes, raw mode) + "1": (2, 1, 1, "1"), + "L": (5, 8, 1, "L"), + "P": (5, 8, 1, "P"), + "RGB": (5, 8, 3, "RGB;L"), +} + + +def _save(im, fp, filename): + + try: + version, bits, planes, rawmode = SAVE[im.mode] + except KeyError as e: + raise ValueError(f"Cannot save {im.mode} images as PCX") from e + + # bytes per plane + stride = (im.size[0] * bits + 7) // 8 + # stride should be even + stride += stride % 2 + # Stride needs to be kept in sync with the PcxEncode.c version. + # Ideally it should be passed in in the state, but the bytes value + # gets overwritten. + + logger.debug( + "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], + bits, + stride, + ) + + # under windows, we could determine the current screen size with + # "Image.core.display_mode()[1]", but I think that's overkill... 
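+ # Rough map of the fixed 128-byte header written below: 4 bytes of + # id/version/encoding/depth, an 8-byte window, 4 DPI bytes, a 48-byte + # EGA palette slot, a reserved byte, then planes, stride, palette info + # and screen size, zero-padded to 128 bytes (the assert after the + # write checks the total).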
+ + screen = im.size + + dpi = 100, 100 + + # PCX header + fp.write( + o8(10) + + o8(version) + + o8(1) + + o8(bits) + + o16(0) + + o16(0) + + o16(im.size[0] - 1) + + o16(im.size[1] - 1) + + o16(dpi[0]) + + o16(dpi[1]) + + b"\0" * 24 + + b"\xFF" * 24 + + b"\0" + + o8(planes) + + o16(stride) + + o16(1) + + o16(screen[0]) + + o16(screen[1]) + + b"\0" * 54 + ) + + assert fp.tell() == 128 + + ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))]) + + if im.mode == "P": + # colour palette + fp.write(o8(12)) + palette = im.im.getpalette("RGB", "RGB") + palette += b"\x00" * (768 - len(palette)) + fp.write(palette) # 768 bytes + elif im.mode == "L": + # greyscale palette + fp.write(o8(12)) + for i in range(256): + fp.write(o8(i) * 3) + + +# -------------------------------------------------------------------- +# registry + + +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) + +Image.register_extension(PcxImageFile.format, ".pcx") + +Image.register_mime(PcxImageFile.format, "image/x-pcx") diff --git a/sbsheriff/Lib/site-packages/PIL/PdfImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PdfImagePlugin.py new file mode 100644 index 0000000..404759a --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PdfImagePlugin.py @@ -0,0 +1,271 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PDF (Acrobat) file handling +# +# History: +# 1996-07-16 fl Created +# 1997-01-18 fl Fixed header +# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. +# 2004-02-24 fl Fixes for 1 and P images. +# +# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996-1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +# Image plugin for PDF images (output only). +## + +import io +import math +import os +import time + +from . import Image, ImageFile, ImageSequence, PdfParser, __version__, features + +# +# -------------------------------------------------------------------- + +# object ids: +# 1. catalogue +# 2. pages +# 3. image +# 4. page +# 5. page contents + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +## +# (Internal) Image save plugin for the PDF format. 
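+# Illustrative calls that reach this function (via Image.save): +# im.save("out.pdf") # single page +# im.save("out.pdf", save_all=True, append_images=[im2]) # multi-page +# im.save("out.pdf", append=True) # append to an existing PDF +# "resolution" (default 72.0) only scales the page geometry (MediaBox and +# the cm matrix); the pixel data itself is embedded unscaled.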
+ + +def _save(im, fp, filename, save_all=False): + is_appending = im.encoderinfo.get("append", False) + if is_appending: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b") + else: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b") + + resolution = im.encoderinfo.get("resolution", 72.0) + + info = { + "title": None + if is_appending + else os.path.splitext(os.path.basename(filename))[0], + "author": None, + "subject": None, + "keywords": None, + "creator": None, + "producer": None, + "creationDate": None if is_appending else time.gmtime(), + "modDate": None if is_appending else time.gmtime(), + } + for k, default in info.items(): + v = im.encoderinfo.get(k) if k in im.encoderinfo else default + if v: + existing_pdf.info[k[0].upper() + k[1:]] = v + + # + # make sure image data is available + im.load() + + existing_pdf.start_writing() + existing_pdf.write_header() + existing_pdf.write_comment(f"created by Pillow {__version__} PDF driver") + + # + # pages + ims = [im] + if save_all: + append_images = im.encoderinfo.get("append_images", []) + for append_im in append_images: + append_im.encoderinfo = im.encoderinfo.copy() + ims.append(append_im) + number_of_pages = 0 + image_refs = [] + page_refs = [] + contents_refs = [] + for im in ims: + im_number_of_pages = 1 + if save_all: + try: + im_number_of_pages = im.n_frames + except AttributeError: + # Image format does not have n_frames. + # It is a single frame image + pass + number_of_pages += im_number_of_pages + for i in range(im_number_of_pages): + image_refs.append(existing_pdf.next_object_id(0)) + page_refs.append(existing_pdf.next_object_id(0)) + contents_refs.append(existing_pdf.next_object_id(0)) + existing_pdf.pages.append(page_refs[-1]) + + # + # catalog and list of pages + existing_pdf.write_catalog() + + page_number = 0 + for im_sequence in ims: + im_pages = ImageSequence.Iterator(im_sequence) if save_all else [im_sequence] + for im in im_pages: + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode + # (packbits) or LZWDecode (tiff/lzw compression). Note that + # PDF 1.2 also supports Flatedecode (zip compression). 
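+ # The mapping implemented below: "1" -> CCITTFaxDecode when libtiff + # is available (DCTDecode otherwise), "P" -> ASCIIHexDecode, and + # "L"/"RGB"/"CMYK" -> DCTDecode (JPEG), with CMYK also getting an + # inverting Decode array.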
+ + bits = 8 + params = None + decode = None + + # + # Get image characteristics + + width, height = im.size + + if im.mode == "1": + if features.check("libtiff"): + filter = "CCITTFaxDecode" + bits = 1 + params = PdfParser.PdfArray( + [ + PdfParser.PdfDict( + { + "K": -1, + "BlackIs1": True, + "Columns": width, + "Rows": height, + } + ) + ] + ) + else: + filter = "DCTDecode" + colorspace = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + elif im.mode == "L": + filter = "DCTDecode" + # params = f"<< /Predictor 15 /Columns {width-2} >>" + colorspace = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + elif im.mode == "P": + filter = "ASCIIHexDecode" + palette = im.getpalette() + colorspace = [ + PdfParser.PdfName("Indexed"), + PdfParser.PdfName("DeviceRGB"), + 255, + PdfParser.PdfBinary(palette), + ] + procset = "ImageI" # indexed color + elif im.mode == "RGB": + filter = "DCTDecode" + colorspace = PdfParser.PdfName("DeviceRGB") + procset = "ImageC" # color images + elif im.mode == "CMYK": + filter = "DCTDecode" + colorspace = PdfParser.PdfName("DeviceCMYK") + procset = "ImageC" # color images + decode = [1, 0, 1, 0, 1, 0, 1, 0] + else: + raise ValueError(f"cannot save mode {im.mode}") + + # + # image + + op = io.BytesIO() + + if filter == "ASCIIHexDecode": + ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)]) + elif filter == "CCITTFaxDecode": + im.save( + op, + "TIFF", + compression="group4", + # use a single strip + strip_size=math.ceil(im.width / 8) * im.height, + ) + elif filter == "DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "FlateDecode": + ImageFile._save(im, op, [("zip", (0, 0) + im.size, 0, im.mode)]) + elif filter == "RunLengthDecode": + ImageFile._save(im, op, [("packbits", (0, 0) + im.size, 0, im.mode)]) + else: + raise ValueError(f"unsupported PDF filter ({filter})") + + stream = op.getvalue() + if filter == "CCITTFaxDecode": + stream = stream[8:] + filter = PdfParser.PdfArray([PdfParser.PdfName(filter)]) + else: + filter = PdfParser.PdfName(filter) + + existing_pdf.write_obj( + image_refs[page_number], + stream=stream, + Type=PdfParser.PdfName("XObject"), + Subtype=PdfParser.PdfName("Image"), + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Filter=filter, + BitsPerComponent=bits, + Decode=decode, + DecodeParms=params, + ColorSpace=colorspace, + ) + + # + # page + + existing_pdf.write_page( + page_refs[page_number], + Resources=PdfParser.PdfDict( + ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)], + XObject=PdfParser.PdfDict(image=image_refs[page_number]), + ), + MediaBox=[ + 0, + 0, + width * 72.0 / resolution, + height * 72.0 / resolution, + ], + Contents=contents_refs[page_number], + ) + + # + # page contents + + page_contents = b"q %f 0 0 %f 0 0 cm /image Do Q\n" % ( + width * 72.0 / resolution, + height * 72.0 / resolution, + ) + + existing_pdf.write_obj(contents_refs[page_number], stream=page_contents) + + page_number += 1 + + # + # trailer + existing_pdf.write_xref_and_trailer() + if hasattr(fp, "flush"): + fp.flush() + existing_pdf.close() + + +# +# -------------------------------------------------------------------- + + +Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) + +Image.register_extension("PDF", ".pdf") + +Image.register_mime("PDF", "application/pdf") diff --git a/sbsheriff/Lib/site-packages/PIL/PdfParser.py b/sbsheriff/Lib/site-packages/PIL/PdfParser.py new file mode 100644 index 0000000..fd5cc5a --- /dev/null +++ 
b/sbsheriff/Lib/site-packages/PIL/PdfParser.py @@ -0,0 +1,998 @@ +import calendar +import codecs +import collections +import mmap +import os +import re +import time +import zlib + + +# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set +# on page 656 +def encode_text(s): + return codecs.BOM_UTF16_BE + s.encode("utf_16_be") + + +PDFDocEncoding = { + 0x16: "\u0017", + 0x18: "\u02D8", + 0x19: "\u02C7", + 0x1A: "\u02C6", + 0x1B: "\u02D9", + 0x1C: "\u02DD", + 0x1D: "\u02DB", + 0x1E: "\u02DA", + 0x1F: "\u02DC", + 0x80: "\u2022", + 0x81: "\u2020", + 0x82: "\u2021", + 0x83: "\u2026", + 0x84: "\u2014", + 0x85: "\u2013", + 0x86: "\u0192", + 0x87: "\u2044", + 0x88: "\u2039", + 0x89: "\u203A", + 0x8A: "\u2212", + 0x8B: "\u2030", + 0x8C: "\u201E", + 0x8D: "\u201C", + 0x8E: "\u201D", + 0x8F: "\u2018", + 0x90: "\u2019", + 0x91: "\u201A", + 0x92: "\u2122", + 0x93: "\uFB01", + 0x94: "\uFB02", + 0x95: "\u0141", + 0x96: "\u0152", + 0x97: "\u0160", + 0x98: "\u0178", + 0x99: "\u017D", + 0x9A: "\u0131", + 0x9B: "\u0142", + 0x9C: "\u0153", + 0x9D: "\u0161", + 0x9E: "\u017E", + 0xA0: "\u20AC", +} + + +def decode_text(b): + if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: + return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be") + else: + return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b) + + +class PdfFormatError(RuntimeError): + """An error that probably indicates a syntactic or semantic error in the + PDF file structure""" + + pass + + +def check_format_condition(condition, error_message): + if not condition: + raise PdfFormatError(error_message) + + +class IndirectReference( + collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"]) +): + def __str__(self): + return "%s %s R" % self + + def __bytes__(self): + return self.__str__().encode("us-ascii") + + def __eq__(self, other): + return ( + other.__class__ is self.__class__ + and other.object_id == self.object_id + and other.generation == self.generation + ) + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return hash((self.object_id, self.generation)) + + +class IndirectObjectDef(IndirectReference): + def __str__(self): + return "%s %s obj" % self + + +class XrefTable: + def __init__(self): + self.existing_entries = {} # object ID => (offset, generation) + self.new_entries = {} # object ID => (offset, generation) + self.deleted_entries = {0: 65536} # object ID => generation + self.reading_finished = False + + def __setitem__(self, key, value): + if self.reading_finished: + self.new_entries[key] = value + else: + self.existing_entries[key] = value + if key in self.deleted_entries: + del self.deleted_entries[key] + + def __getitem__(self, key): + try: + return self.new_entries[key] + except KeyError: + return self.existing_entries[key] + + def __delitem__(self, key): + if key in self.new_entries: + generation = self.new_entries[key][1] + 1 + del self.new_entries[key] + self.deleted_entries[key] = generation + elif key in self.existing_entries: + generation = self.existing_entries[key][1] + 1 + self.deleted_entries[key] = generation + elif key in self.deleted_entries: + generation = self.deleted_entries[key] + else: + raise IndexError( + "object ID " + str(key) + " cannot be deleted because it doesn't exist" + ) + + def __contains__(self, key): + return key in self.existing_entries or key in self.new_entries + + def __len__(self): + return len( + set(self.existing_entries.keys()) + | set(self.new_entries.keys()) + | set(self.deleted_entries.keys()) + ) + + 
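+ # keys() below reports the object IDs that are live for the next + # xref section: existing entries minus deletions, plus new entries.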
def keys(self): + return ( + set(self.existing_entries.keys()) - set(self.deleted_entries.keys()) + ) | set(self.new_entries.keys()) + + def write(self, f): + keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys())) + deleted_keys = sorted(set(self.deleted_entries.keys())) + startxref = f.tell() + f.write(b"xref\n") + while keys: + # find a contiguous sequence of object IDs + prev = None + for index, key in enumerate(keys): + if prev is None or prev + 1 == key: + prev = key + else: + contiguous_keys = keys[:index] + keys = keys[index:] + break + else: + contiguous_keys = keys + keys = None + f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys))) + for object_id in contiguous_keys: + if object_id in self.new_entries: + f.write(b"%010d %05d n \n" % self.new_entries[object_id]) + else: + this_deleted_object_id = deleted_keys.pop(0) + check_format_condition( + object_id == this_deleted_object_id, + f"expected the next deleted object ID to be {object_id}, " + f"instead found {this_deleted_object_id}", + ) + try: + next_in_linked_list = deleted_keys[0] + except IndexError: + next_in_linked_list = 0 + f.write( + b"%010d %05d f \n" + % (next_in_linked_list, self.deleted_entries[object_id]) + ) + return startxref + + +class PdfName: + def __init__(self, name): + if isinstance(name, PdfName): + self.name = name.name + elif isinstance(name, bytes): + self.name = name + else: + self.name = name.encode("us-ascii") + + def name_as_str(self): + return self.name.decode("us-ascii") + + def __eq__(self, other): + return ( + isinstance(other, PdfName) and other.name == self.name + ) or other == self.name + + def __hash__(self): + return hash(self.name) + + def __repr__(self): + return f"PdfName({repr(self.name)})" + + @classmethod + def from_pdf_stream(cls, data): + return cls(PdfParser.interpret_name(data)) + + allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"} + + def __bytes__(self): + result = bytearray(b"/") + for b in self.name: + if b in self.allowed_chars: + result.append(b) + else: + result.extend(b"#%02X" % b) + return bytes(result) + + +class PdfArray(list): + def __bytes__(self): + return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]" + + +class PdfDict(collections.UserDict): + def __setattr__(self, key, value): + if key == "data": + collections.UserDict.__setattr__(self, key, value) + else: + self[key.encode("us-ascii")] = value + + def __getattr__(self, key): + try: + value = self[key.encode("us-ascii")] + except KeyError as e: + raise AttributeError(key) from e + if isinstance(value, bytes): + value = decode_text(value) + if key.endswith("Date"): + if value.startswith("D:"): + value = value[2:] + + relationship = "Z" + if len(value) > 17: + relationship = value[14] + offset = int(value[15:17]) * 60 + if len(value) > 20: + offset += int(value[18:20]) + + format = "%Y%m%d%H%M%S"[: len(value) - 2] + value = time.strptime(value[: len(format) + 2], format) + if relationship in ["+", "-"]: + offset *= 60 + if relationship == "+": + offset *= -1 + value = time.gmtime(calendar.timegm(value) + offset) + return value + + def __bytes__(self): + out = bytearray(b"<<") + for key, value in self.items(): + if value is None: + continue + value = pdf_repr(value) + out.extend(b"\n") + out.extend(bytes(PdfName(key))) + out.extend(b" ") + out.extend(value) + out.extend(b"\n>>") + return bytes(out) + + +class PdfBinary: + def __init__(self, data): + self.data = data + + def __bytes__(self): + return b"<%s>" % b"".join(b"%02X" % b for b in self.data) + + 
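+# For instance, PdfName/PdfArray/PdfDict together with pdf_repr (defined +# below) serialize as: +# pdf_repr({b"Type": PdfName(b"Catalog")}) == b"<<\n/Type /Catalog\n>>" +# pdf_repr(PdfArray([1, 2.5, None])) == b"[ 1 2.5 null ]"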
+class PdfStream: + def __init__(self, dictionary, buf): + self.dictionary = dictionary + self.buf = buf + + def decode(self): + try: + filter = self.dictionary.Filter + except AttributeError: + return self.buf + if filter == b"FlateDecode": + try: + expected_length = self.dictionary.DL + except AttributeError: + expected_length = self.dictionary.Length + return zlib.decompress(self.buf, bufsize=int(expected_length)) + else: + raise NotImplementedError( + f"stream filter {repr(self.dictionary.Filter)} unknown/unsupported" + ) + + +def pdf_repr(x): + if x is True: + return b"true" + elif x is False: + return b"false" + elif x is None: + return b"null" + elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)): + return bytes(x) + elif isinstance(x, int): + return str(x).encode("us-ascii") + elif isinstance(x, float): + return str(x).encode("us-ascii") + elif isinstance(x, time.struct_time): + return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")" + elif isinstance(x, dict): + return bytes(PdfDict(x)) + elif isinstance(x, list): + return bytes(PdfArray(x)) + elif isinstance(x, str): + return pdf_repr(encode_text(x)) + elif isinstance(x, bytes): + # XXX escape more chars? handle binary garbage + x = x.replace(b"\\", b"\\\\") + x = x.replace(b"(", b"\\(") + x = x.replace(b")", b"\\)") + return b"(" + x + b")" + else: + return bytes(x) + + +class PdfParser: + """Based on + https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf + Supports PDF up to 1.4 + """ + + def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"): + if buf and f: + raise RuntimeError("specify buf or f or filename, but not both buf and f") + self.filename = filename + self.buf = buf + self.f = f + self.start_offset = start_offset + self.should_close_buf = False + self.should_close_file = False + if filename is not None and f is None: + self.f = f = open(filename, mode) + self.should_close_file = True + if f is not None: + self.buf = buf = self.get_buf_from_file(f) + self.should_close_buf = True + if not filename and hasattr(f, "name"): + self.filename = f.name + self.cached_objects = {} + if buf: + self.read_pdf_info() + else: + self.file_size_total = self.file_size_this = 0 + self.root = PdfDict() + self.root_ref = None + self.info = PdfDict() + self.info_ref = None + self.page_tree_root = {} + self.pages = [] + self.orig_pages = [] + self.pages_ref = None + self.last_xref_section_offset = None + self.trailer_dict = {} + self.xref_table = XrefTable() + self.xref_table.reading_finished = True + if f: + self.seek_end() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + return False # do not suppress exceptions + + def start_writing(self): + self.close_buf() + self.seek_end() + + def close_buf(self): + try: + self.buf.close() + except AttributeError: + pass + self.buf = None + + def close(self): + if self.should_close_buf: + self.close_buf() + if self.f is not None and self.should_close_file: + self.f.close() + self.f = None + + def seek_end(self): + self.f.seek(0, os.SEEK_END) + + def write_header(self): + self.f.write(b"%PDF-1.4\n") + + def write_comment(self, s): + self.f.write(f"% {s}\n".encode()) + + def write_catalog(self): + self.del_root() + self.root_ref = self.next_object_id(self.f.tell()) + self.pages_ref = self.next_object_id(0) + self.rewrite_pages() + self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref) + self.write_obj( + self.pages_ref, + 
Type=PdfName(b"Pages"), + Count=len(self.pages), + Kids=self.pages, + ) + return self.root_ref + + def rewrite_pages(self): + pages_tree_nodes_to_delete = [] + for i, page_ref in enumerate(self.orig_pages): + page_info = self.cached_objects[page_ref] + del self.xref_table[page_ref.object_id] + pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")]) + if page_ref not in self.pages: + # the page has been deleted + continue + # make dict keys into strings for passing to write_page + stringified_page_info = {} + for key, value in page_info.items(): + # key should be a PdfName + stringified_page_info[key.name_as_str()] = value + stringified_page_info["Parent"] = self.pages_ref + new_page_ref = self.write_page(None, **stringified_page_info) + for j, cur_page_ref in enumerate(self.pages): + if cur_page_ref == page_ref: + # replace the page reference with the new one + self.pages[j] = new_page_ref + # delete redundant Pages tree nodes from xref table + for pages_tree_node_ref in pages_tree_nodes_to_delete: + while pages_tree_node_ref: + pages_tree_node = self.cached_objects[pages_tree_node_ref] + if pages_tree_node_ref.object_id in self.xref_table: + del self.xref_table[pages_tree_node_ref.object_id] + pages_tree_node_ref = pages_tree_node.get(b"Parent", None) + self.orig_pages = [] + + def write_xref_and_trailer(self, new_root_ref=None): + if new_root_ref: + self.del_root() + self.root_ref = new_root_ref + if self.info: + self.info_ref = self.write_obj(None, self.info) + start_xref = self.xref_table.write(self.f) + num_entries = len(self.xref_table) + trailer_dict = {b"Root": self.root_ref, b"Size": num_entries} + if self.last_xref_section_offset is not None: + trailer_dict[b"Prev"] = self.last_xref_section_offset + if self.info: + trailer_dict[b"Info"] = self.info_ref + self.last_xref_section_offset = start_xref + self.f.write( + b"trailer\n" + + bytes(PdfDict(trailer_dict)) + + b"\nstartxref\n%d\n%%%%EOF" % start_xref + ) + + def write_page(self, ref, *objs, **dict_obj): + if isinstance(ref, int): + ref = self.pages[ref] + if "Type" not in dict_obj: + dict_obj["Type"] = PdfName(b"Page") + if "Parent" not in dict_obj: + dict_obj["Parent"] = self.pages_ref + return self.write_obj(ref, *objs, **dict_obj) + + def write_obj(self, ref, *objs, **dict_obj): + f = self.f + if ref is None: + ref = self.next_object_id(f.tell()) + else: + self.xref_table[ref.object_id] = (f.tell(), ref.generation) + f.write(bytes(IndirectObjectDef(*ref))) + stream = dict_obj.pop("stream", None) + if stream is not None: + dict_obj["Length"] = len(stream) + if dict_obj: + f.write(pdf_repr(dict_obj)) + for obj in objs: + f.write(pdf_repr(obj)) + if stream is not None: + f.write(b"stream\n") + f.write(stream) + f.write(b"\nendstream\n") + f.write(b"endobj\n") + return ref + + def del_root(self): + if self.root_ref is None: + return + del self.xref_table[self.root_ref.object_id] + del self.xref_table[self.root[b"Pages"].object_id] + + @staticmethod + def get_buf_from_file(f): + if hasattr(f, "getbuffer"): + return f.getbuffer() + elif hasattr(f, "getvalue"): + return f.getvalue() + else: + try: + return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) + except ValueError: # cannot mmap an empty file + return b"" + + def read_pdf_info(self): + self.file_size_total = len(self.buf) + self.file_size_this = self.file_size_total - self.start_offset + self.read_trailer() + self.root_ref = self.trailer_dict[b"Root"] + self.info_ref = self.trailer_dict.get(b"Info", None) + self.root = PdfDict(self.read_indirect(self.root_ref)) 
+ if self.info_ref is None: + self.info = PdfDict() + else: + self.info = PdfDict(self.read_indirect(self.info_ref)) + check_format_condition(b"Type" in self.root, "/Type missing in Root") + check_format_condition( + self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog" + ) + check_format_condition(b"Pages" in self.root, "/Pages missing in Root") + check_format_condition( + isinstance(self.root[b"Pages"], IndirectReference), + "/Pages in Root is not an indirect reference", + ) + self.pages_ref = self.root[b"Pages"] + self.page_tree_root = self.read_indirect(self.pages_ref) + self.pages = self.linearize_page_tree(self.page_tree_root) + # save the original list of page references + # in case the user modifies, adds or deletes some pages + # and we need to rewrite the pages and their list + self.orig_pages = self.pages[:] + + def next_object_id(self, offset=None): + try: + # TODO: support reuse of deleted objects + reference = IndirectReference(max(self.xref_table.keys()) + 1, 0) + except ValueError: + reference = IndirectReference(1, 0) + if offset is not None: + self.xref_table[reference.object_id] = (offset, 0) + return reference + + delimiter = rb"[][()<>{}/%]" + delimiter_or_ws = rb"[][()<>{}/%\000\011\012\014\015\040]" + whitespace = rb"[\000\011\012\014\015\040]" + whitespace_or_hex = rb"[\000\011\012\014\015\0400-9a-fA-F]" + whitespace_optional = whitespace + b"*" + whitespace_mandatory = whitespace + b"+" + # No "\012" aka "\n" or "\015" aka "\r": + whitespace_optional_no_nl = rb"[\000\011\014\040]*" + newline_only = rb"[\r\n]+" + newline = whitespace_optional_no_nl + newline_only + whitespace_optional_no_nl + re_trailer_end = re.compile( + whitespace_mandatory + + rb"trailer" + + whitespace_optional + + rb"<<(.*>>)" + + newline + + rb"startxref" + + newline + + rb"([0-9]+)" + + newline + + rb"%%EOF" + + whitespace_optional + + rb"$", + re.DOTALL, + ) + re_trailer_prev = re.compile( + whitespace_optional + + rb"trailer" + + whitespace_optional + + rb"<<(.*?>>)" + + newline + + rb"startxref" + + newline + + rb"([0-9]+)" + + newline + + rb"%%EOF" + + whitespace_optional, + re.DOTALL, + ) + + def read_trailer(self): + search_start_offset = len(self.buf) - 16384 + if search_start_offset < self.start_offset: + search_start_offset = self.start_offset + m = self.re_trailer_end.search(self.buf, search_start_offset) + check_format_condition(m, "trailer end not found") + # make sure we found the LAST trailer + last_match = m + while m: + last_match = m + m = self.re_trailer_end.search(self.buf, m.start() + 16) + if not m: + m = last_match + trailer_data = m.group(1) + self.last_xref_section_offset = int(m.group(2)) + self.trailer_dict = self.interpret_trailer(trailer_data) + self.xref_table = XrefTable() + self.read_xref_table(xref_section_offset=self.last_xref_section_offset) + if b"Prev" in self.trailer_dict: + self.read_prev_trailer(self.trailer_dict[b"Prev"]) + + def read_prev_trailer(self, xref_section_offset): + trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset) + m = self.re_trailer_prev.search( + self.buf[trailer_offset : trailer_offset + 16384] + ) + check_format_condition(m, "previous trailer not found") + trailer_data = m.group(1) + check_format_condition( + int(m.group(2)) == xref_section_offset, + "xref section offset in previous trailer doesn't match what was expected", + ) + trailer_dict = self.interpret_trailer(trailer_data) + if b"Prev" in trailer_dict: + self.read_prev_trailer(trailer_dict[b"Prev"]) + + re_whitespace_optional = 
re.compile(whitespace_optional) + re_name = re.compile( + whitespace_optional + + rb"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + + delimiter_or_ws + + rb")" + ) + re_dict_start = re.compile(whitespace_optional + rb"<<") + re_dict_end = re.compile(whitespace_optional + rb">>" + whitespace_optional) + + @classmethod + def interpret_trailer(cls, trailer_data): + trailer = {} + offset = 0 + while True: + m = cls.re_name.match(trailer_data, offset) + if not m: + m = cls.re_dict_end.match(trailer_data, offset) + check_format_condition( + m and m.end() == len(trailer_data), + "name not found in trailer, remaining data: " + + repr(trailer_data[offset:]), + ) + break + key = cls.interpret_name(m.group(1)) + value, offset = cls.get_value(trailer_data, m.end()) + trailer[key] = value + check_format_condition( + b"Size" in trailer and isinstance(trailer[b"Size"], int), + "/Size not in trailer or not an integer", + ) + check_format_condition( + b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), + "/Root not in trailer or not an indirect reference", + ) + return trailer + + re_hashes_in_name = re.compile(rb"([^#]*)(#([0-9a-fA-F]{2}))?") + + @classmethod + def interpret_name(cls, raw, as_text=False): + name = b"" + for m in cls.re_hashes_in_name.finditer(raw): + if m.group(3): + name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii")) + else: + name += m.group(1) + if as_text: + return name.decode("utf-8") + else: + return bytes(name) + + re_null = re.compile(whitespace_optional + rb"null(?=" + delimiter_or_ws + rb")") + re_true = re.compile(whitespace_optional + rb"true(?=" + delimiter_or_ws + rb")") + re_false = re.compile(whitespace_optional + rb"false(?=" + delimiter_or_ws + rb")") + re_int = re.compile( + whitespace_optional + rb"([-+]?[0-9]+)(?=" + delimiter_or_ws + rb")" + ) + re_real = re.compile( + whitespace_optional + + rb"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + + delimiter_or_ws + + rb")" + ) + re_array_start = re.compile(whitespace_optional + rb"\[") + re_array_end = re.compile(whitespace_optional + rb"]") + re_string_hex = re.compile( + whitespace_optional + rb"<(" + whitespace_or_hex + rb"*)>" + ) + re_string_lit = re.compile(whitespace_optional + rb"\(") + re_indirect_reference = re.compile( + whitespace_optional + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"R(?=" + + delimiter_or_ws + + rb")" + ) + re_indirect_def_start = re.compile( + whitespace_optional + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"obj(?=" + + delimiter_or_ws + + rb")" + ) + re_indirect_def_end = re.compile( + whitespace_optional + rb"endobj(?=" + delimiter_or_ws + rb")" + ) + re_comment = re.compile( + rb"(" + whitespace_optional + rb"%[^\r\n]*" + newline + rb")*" + ) + re_stream_start = re.compile(whitespace_optional + rb"stream\r?\n") + re_stream_end = re.compile( + whitespace_optional + rb"endstream(?=" + delimiter_or_ws + rb")" + ) + + @classmethod + def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1): + if max_nesting == 0: + return None, None + m = cls.re_comment.match(data, offset) + if m: + offset = m.end() + m = cls.re_indirect_def_start.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object definition: object ID must be greater than 0", + ) + check_format_condition( + int(m.group(2)) >= 0, + "indirect object definition: generation must be non-negative", + ) + check_format_condition( + expect_indirect is None + 
or expect_indirect + == IndirectReference(int(m.group(1)), int(m.group(2))), + "indirect object definition different than expected", + ) + object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1) + if offset is None: + return object, None + m = cls.re_indirect_def_end.match(data, offset) + check_format_condition(m, "indirect object definition end not found") + return object, m.end() + check_format_condition( + not expect_indirect, "indirect object definition not found" + ) + m = cls.re_indirect_reference.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object reference: object ID must be greater than 0", + ) + check_format_condition( + int(m.group(2)) >= 0, + "indirect object reference: generation must be non-negative", + ) + return IndirectReference(int(m.group(1)), int(m.group(2))), m.end() + m = cls.re_dict_start.match(data, offset) + if m: + offset = m.end() + result = {} + m = cls.re_dict_end.match(data, offset) + while not m: + key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + if offset is None: + return result, None + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + result[key] = value + if offset is None: + return result, None + m = cls.re_dict_end.match(data, offset) + offset = m.end() + m = cls.re_stream_start.match(data, offset) + if m: + try: + stream_len = int(result[b"Length"]) + except (TypeError, KeyError, ValueError) as e: + raise PdfFormatError( + "bad or missing Length in stream dict (%r)" + % result.get(b"Length", None) + ) from e + stream_data = data[m.end() : m.end() + stream_len] + m = cls.re_stream_end.match(data, m.end() + stream_len) + check_format_condition(m, "stream end not found") + offset = m.end() + result = PdfStream(PdfDict(result), stream_data) + else: + result = PdfDict(result) + return result, offset + m = cls.re_array_start.match(data, offset) + if m: + offset = m.end() + result = [] + m = cls.re_array_end.match(data, offset) + while not m: + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + result.append(value) + if offset is None: + return result, None + m = cls.re_array_end.match(data, offset) + return result, m.end() + m = cls.re_null.match(data, offset) + if m: + return None, m.end() + m = cls.re_true.match(data, offset) + if m: + return True, m.end() + m = cls.re_false.match(data, offset) + if m: + return False, m.end() + m = cls.re_name.match(data, offset) + if m: + return PdfName(cls.interpret_name(m.group(1))), m.end() + m = cls.re_int.match(data, offset) + if m: + return int(m.group(1)), m.end() + m = cls.re_real.match(data, offset) + if m: + # XXX Decimal instead of float??? 
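+ # (PDF real numbers never carry exponent notation, so float() can + # parse every match of re_real)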
+ return float(m.group(1)), m.end() + m = cls.re_string_hex.match(data, offset) + if m: + # filter out whitespace + hex_string = bytearray( + b for b in m.group(1) if b in b"0123456789abcdefABCDEF" + ) + if len(hex_string) % 2 == 1: + # append a 0 if the length is not even - yes, at the end + hex_string.append(ord(b"0")) + return bytearray.fromhex(hex_string.decode("us-ascii")), m.end() + m = cls.re_string_lit.match(data, offset) + if m: + return cls.get_literal_string(data, m.end()) + # return None, offset # fallback (only for debugging) + raise PdfFormatError("unrecognized object: " + repr(data[offset : offset + 32])) + + re_lit_str_token = re.compile( + rb"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))" + ) + escaped_chars = { + b"n": b"\n", + b"r": b"\r", + b"t": b"\t", + b"b": b"\b", + b"f": b"\f", + b"(": b"(", + b")": b")", + b"\\": b"\\", + ord(b"n"): b"\n", + ord(b"r"): b"\r", + ord(b"t"): b"\t", + ord(b"b"): b"\b", + ord(b"f"): b"\f", + ord(b"("): b"(", + ord(b")"): b")", + ord(b"\\"): b"\\", + } + + @classmethod + def get_literal_string(cls, data, offset): + nesting_depth = 0 + result = bytearray() + for m in cls.re_lit_str_token.finditer(data, offset): + result.extend(data[offset : m.start()]) + if m.group(1): + result.extend(cls.escaped_chars[m.group(1)[1]]) + elif m.group(2): + result.append(int(m.group(2)[1:], 8)) + elif m.group(3): + pass + elif m.group(5): + result.extend(b"\n") + elif m.group(6): + result.extend(b"(") + nesting_depth += 1 + elif m.group(7): + if nesting_depth == 0: + return bytes(result), m.end() + result.extend(b")") + nesting_depth -= 1 + offset = m.end() + raise PdfFormatError("unfinished literal string") + + re_xref_section_start = re.compile(whitespace_optional + rb"xref" + newline) + re_xref_subsection_start = re.compile( + whitespace_optional + + rb"([0-9]+)" + + whitespace_mandatory + + rb"([0-9]+)" + + whitespace_optional + + newline_only + ) + re_xref_entry = re.compile(rb"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)") + + def read_xref_table(self, xref_section_offset): + subsection_found = False + m = self.re_xref_section_start.match( + self.buf, xref_section_offset + self.start_offset + ) + check_format_condition(m, "xref section start not found") + offset = m.end() + while True: + m = self.re_xref_subsection_start.match(self.buf, offset) + if not m: + check_format_condition( + subsection_found, "xref subsection start not found" + ) + break + subsection_found = True + offset = m.end() + first_object = int(m.group(1)) + num_objects = int(m.group(2)) + for i in range(first_object, first_object + num_objects): + m = self.re_xref_entry.match(self.buf, offset) + check_format_condition(m, "xref entry not found") + offset = m.end() + is_free = m.group(3) == b"f" + generation = int(m.group(2)) + if not is_free: + new_entry = (int(m.group(1)), generation) + check_format_condition( + i not in self.xref_table or self.xref_table[i] == new_entry, + "xref entry duplicated (and not identical)", + ) + self.xref_table[i] = new_entry + return offset + + def read_indirect(self, ref, max_nesting=-1): + offset, generation = self.xref_table[ref[0]] + check_format_condition( + generation == ref[1], + f"expected to find generation {ref[1]} for object ID {ref[0]} in xref " + f"table, instead found generation {generation} at offset {offset}", + ) + value = self.get_value( + self.buf, + offset + self.start_offset, + expect_indirect=IndirectReference(*ref), + max_nesting=max_nesting, + )[0] + self.cached_objects[ref] = value + return value + + def 
linearize_page_tree(self, node=None): + if node is None: + node = self.page_tree_root + check_format_condition( + node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages" + ) + pages = [] + for kid in node[b"Kids"]: + kid_object = self.read_indirect(kid) + if kid_object[b"Type"] == b"Page": + pages.append(kid) + else: + pages.extend(self.linearize_page_tree(node=kid_object)) + return pages diff --git a/sbsheriff/Lib/site-packages/PIL/PixarImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PixarImagePlugin.py new file mode 100644 index 0000000..c4860b6 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PixarImagePlugin.py @@ -0,0 +1,70 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile +from ._binary import i16le as i16 + +# +# helpers + + +def _accept(prefix): + return prefix[:4] == b"\200\350\000\000" + + +## +# Image plugin for PIXAR raster images. + + +class PixarImageFile(ImageFile.ImageFile): + + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self): + + # assuming a 4-byte magic label + s = self.fp.read(4) + if not _accept(s): + raise SyntaxError("not a PIXAR file") + + # read rest of header + s = s + self.fp.read(508) + + self._size = i16(s, 418), i16(s, 416) + + # get channel/depth descriptions + mode = i16(s, 424), i16(s, 426) + + if mode == (14, 2): + self.mode = "RGB" + # FIXME: to be continued... + + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))] + + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile, _accept) + +Image.register_extension(PixarImageFile.format, ".pxr") diff --git a/sbsheriff/Lib/site-packages/PIL/PngImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PngImagePlugin.py new file mode 100644 index 0000000..2c53be1 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PngImagePlugin.py @@ -0,0 +1,1455 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). 
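+# (a PNG stream is the 8-byte signature followed by chunks; each chunk is +# a 4-byte length, a 4-byte type, the chunk data and a 4-byte CRC)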
+# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import itertools +import logging +import re +import struct +import warnings +import zlib +from enum import IntEnum + +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from ._binary import o16be as o16 +from ._binary import o32be as o32 +from ._deprecate import deprecate + +logger = logging.getLogger(__name__) + +is_cid = re.compile(rb"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + # Greyscale + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + # Truecolour + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + # Indexed-colour + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + # Greyscale with alpha + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + # Truecolour with alpha + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b"^\xff*\x00\xff*$") + +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +""" +Maximum decompressed size for a iTXt or zTXt chunk. +Eliminates decompression bombs where compressed chunks can expand 1000x. +See :ref:`Text in PNG File Format`. +""" +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK +""" +Set the maximum total text chunk size. +See :ref:`Text in PNG File Format`. +""" + + +# APNG frame disposal modes +class Disposal(IntEnum): + OP_NONE = 0 + """ + No disposal is done on this frame before rendering the next frame. + See :ref:`Saving APNG sequences`. + """ + OP_BACKGROUND = 1 + """ + This frame’s modified region is cleared to fully transparent black before rendering + the next frame. + See :ref:`Saving APNG sequences`. + """ + OP_PREVIOUS = 2 + """ + This frame’s modified region is reverted to the previous frame’s contents before + rendering the next frame. + See :ref:`Saving APNG sequences`. + """ + + +# APNG frame blend modes +class Blend(IntEnum): + OP_SOURCE = 0 + """ + All color components of this frame, including alpha, overwrite the previous output + image contents. + See :ref:`Saving APNG sequences`. + """ + OP_OVER = 1 + """ + This frame should be alpha composited with the previous output image contents. 
+ See :ref:`Saving APNG sequences`. + """ + + +def __getattr__(name): + for enum, prefix in {Disposal: "APNG_DISPOSE_", Blend: "APNG_BLEND_"}.items(): + if name.startswith(prefix): + name = name[len(prefix) :] + if name in enum.__members__: + deprecate(f"{prefix}{name}", 10, f"{enum.__name__}.{name}") + return enum[name] + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + + +def _crc32(data, seed=0): + return zlib.crc32(data, seed) & 0xFFFFFFFF + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + + +class ChunkStream: + def __init__(self, fp): + + self.fp = fp + self.queue = [] + + def read(self): + """Fetch a new chunk. Returns header information.""" + cid = None + + if self.queue: + cid, pos, length = self.queue.pop() + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + if not ImageFile.LOAD_TRUNCATED_IMAGES: + raise SyntaxError(f"broken PNG file (chunk {repr(cid)})") + + return cid, pos, length + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + self.queue = self.fp = None + + def push(self, cid, pos, length): + + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + """Call the appropriate chunk handler""" + + logger.debug("STREAM %r %s %s", cid, pos, length) + return getattr(self, "chunk_" + cid.decode("ascii"))(pos, length) + + def crc(self, cid, data): + """Read and verify checksum""" + + # Skip CRC checks for ancillary chunks if allowed to load truncated + # images + # 5th byte of first char is 1 [specs, section 5.4] + if ImageFile.LOAD_TRUNCATED_IMAGES and (cid[0] >> 5 & 1): + self.crc_skip(cid, data) + return + + try: + crc1 = _crc32(data, _crc32(cid)) + crc2 = i32(self.fp.read(4)) + if crc1 != crc2: + raise SyntaxError( + f"broken PNG file (bad header checksum in {repr(cid)})" + ) + except struct.error as e: + raise SyntaxError( + f"broken PNG file (incomplete checksum in {repr(cid)})" + ) from e + + def crc_skip(self, cid, data): + """Read checksum""" + + self.fp.read(4) + + def verify(self, endchunk=b"IEND"): + + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. + + cids = [] + + while True: + try: + cid, pos, length = self.read() + except struct.error as e: + raise OSError("truncated PNG file") from e + + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + + @staticmethod + def __new__(cls, text, lang=None, tkey=None): + """ + :param cls: the class to use when creating the instance + :param text: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo: + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self): + self.chunks = [] + + def add(self, cid, data, after_idat=False): + """Appends an arbitrary chunk. Use with caution. 
+ + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + :param after_idat: for use with private chunks. Whether the chunk + should be written after IDAT + + """ + + chunk = [cid, data] + if after_idat: + chunk.append(True) + self.chunks.append(tuple(chunk)) + + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add( + b"iTXt", + key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value), + ) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value) + + def add_text(self, key, value, zip=False): + """Appends a text chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, zip=zip) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode("latin-1", "strict") + except UnicodeError: + return self.add_itxt(key, value, zip=zip) + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + + +class PngStream(ChunkStream): + def __init__(self, fp): + super().__init__(fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + self.im_custom_mimetype = None + self.im_n_frames = None + self._seq_num = None + self.rewind_state = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError( + "Too much memory used in text chunks: " + f"{self.text_memory}>MAX_TEXT_MEMORY" + ) + + def save_rewind(self): + self.rewind_state = { + "info": self.im_info.copy(), + "tile": self.im_tile, + "seq_num": self._seq_num, + } + + def rewind(self): + self.im_info = self.rewind_state["info"] + self.im_tile = self.rewind_state["tile"] + self._seq_num = self.rewind_state["seq_num"] + + def chunk_iCCP(self, pos, length): + + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %r", s[:i]) + logger.debug("Compression method %s", s[i]) + comp_method = s[i] + if comp_method != 0: + raise SyntaxError(f"Unknown compression method {comp_method} in iCCP chunk") + try: + icc_profile = _safe_zlib_decompress(s[i + 2 :]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + icc_profile = None + 
else: + raise + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos, length): + + # image header + s = ImageFile._safe_read(self.fp, length) + if length < 13: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + raise ValueError("Truncated IHDR chunk") + self.im_size = i32(s, 0), i32(s, 4) + try: + self.im_mode, self.im_rawmode = _MODES[(s[8], s[9])] + except Exception: + pass + if s[12]: + self.im_info["interlace"] = 1 + if s[11]: + raise SyntaxError("unknown filter category") + return s + + def chunk_IDAT(self, pos, length): + + # image data + if "bbox" in self.im_info: + tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)] + else: + if self.im_n_frames is not None: + self.im_info["default_image"] = True + tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)] + self.im_tile = tile + self.im_idat = length + raise EOFError + + def chunk_IEND(self, pos, length): + + # end of PNG image + raise EOFError + + def chunk_PLTE(self, pos, length): + + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos, length): + + # transparency + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + if _simple_palette.match(s): + # tRNS contains only one full-transparent entry, + # other entries are full opaque + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + else: + # otherwise, we have a byte string with one alpha value + # for each palette entry + self.im_info["transparency"] = s + elif self.im_mode in ("1", "L", "I"): + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s, 2), i16(s, 4) + return s + + def chunk_gAMA(self, pos, length): + # gamma setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_cHRM(self, pos, length): + # chromaticity, 8 unsigned ints, actual value is scaled by 100,000 + # WP x,y, Red x,y, Green x,y Blue x,y + + s = ImageFile._safe_read(self.fp, length) + raw_vals = struct.unpack(">%dI" % (len(s) // 4), s) + self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals) + return s + + def chunk_sRGB(self, pos, length): + # srgb rendering intent, 1 byte + # 0 perceptual + # 1 relative colorimetric + # 2 saturation + # 3 absolute colorimetric + + s = ImageFile._safe_read(self.fp, length) + if length < 1: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + raise ValueError("Truncated sRGB chunk") + self.im_info["srgb"] = s[0] + return s + + def chunk_pHYs(self, pos, length): + + # pixels per unit + s = ImageFile._safe_read(self.fp, length) + if length < 9: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + raise ValueError("Truncated pHYs chunk") + px, py = i32(s, 0), i32(s, 4) + unit = s[8] + if unit == 1: # meter + dpi = px * 0.0254, py * 0.0254 + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos, length): + + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + k = k.decode("latin-1", "strict") + v_str = v.decode("latin-1", "replace") + + self.im_info[k] = v if k == "exif" else v_str + self.im_text[k] = v_str + self.check_text_memory(len(v_str)) + + return s + + def chunk_zTXt(self, pos, length): + + # compressed text + s = 
ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = v[0] + else: + comp_method = 0 + if comp_method != 0: + raise SyntaxError(f"Unknown compression method {comp_method} in zTXt chunk") + try: + v = _safe_zlib_decompress(v[1:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + v = b"" + else: + raise + except zlib.error: + v = b"" + + if k: + k = k.decode("latin-1", "strict") + v = v.decode("latin-1", "replace") + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = r[0], r[1], r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + else: + raise + except zlib.error: + return s + else: + return s + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + def chunk_eXIf(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + self.im_info["exif"] = b"Exif\x00\x00" + s + return s + + # APNG chunks + def chunk_acTL(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + if length < 8: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + raise ValueError("APNG contains truncated acTL chunk") + if self.im_n_frames is not None: + self.im_n_frames = None + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + n_frames = i32(s) + if n_frames == 0 or n_frames > 0x80000000: + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + self.im_n_frames = n_frames + self.im_info["loop"] = i32(s, 4) + self.im_custom_mimetype = "image/apng" + return s + + def chunk_fcTL(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + if length < 26: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + raise ValueError("APNG contains truncated fcTL chunk") + seq = i32(s) + if (self._seq_num is None and seq != 0) or ( + self._seq_num is not None and self._seq_num != seq - 1 + ): + raise SyntaxError("APNG contains frame sequence errors") + self._seq_num = seq + width, height = i32(s, 4), i32(s, 8) + px, py = i32(s, 12), i32(s, 16) + im_w, im_h = self.im_size + if px + width > im_w or py + height > im_h: + raise SyntaxError("APNG contains invalid frames") + self.im_info["bbox"] = (px, py, px + width, py + height) + delay_num, delay_den = i16(s, 20), i16(s, 22) + if delay_den == 0: + delay_den = 100 + self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000 + self.im_info["disposal"] = s[24] + self.im_info["blend"] = s[25] + return s + + def chunk_fdAT(self, pos, length): + if length < 4: + if ImageFile.LOAD_TRUNCATED_IMAGES: + s = ImageFile._safe_read(self.fp, length) + return s + raise ValueError("APNG contains truncated fDAT chunk") + s = ImageFile._safe_read(self.fp, 4) + seq = i32(s) + if self._seq_num != seq - 1: + raise SyntaxError("APNG contains frame sequence errors") + self._seq_num = seq + return self.chunk_IDAT(pos + 4, length - 4) + + +# 
-------------------------------------------------------------------- +# PNG reader + + +def _accept(prefix): + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. + + +class PngImageFile(ImageFile.ImageFile): + + format = "PNG" + format_description = "Portable network graphics" + + def _open(self): + + if not _accept(self.fp.read(8)): + raise SyntaxError("not a PNG file") + self._fp = self.fp + self.__frame = 0 + + # + # Parse headers up to the first IDAT or fDAT chunk + + self.private_chunks = [] + self.png = PngStream(self.fp) + + while True: + + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + if cid[1:2].islower(): + self.private_chunks.append((cid, s)) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... + # (believe me, I've tried ;-) + + self.mode = self.png.im_mode + self._size = self.png.im_size + self.info = self.png.im_info + self._text = None + self.tile = self.png.im_tile + self.custom_mimetype = self.png.im_custom_mimetype + self.n_frames = self.png.im_n_frames or 1 + self.default_image = self.info.get("default_image", False) + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + if cid == b"fdAT": + self.__prepare_idat = length - 4 + else: + self.__prepare_idat = length # used by load_prepare() + + if self.png.im_n_frames is not None: + self._close_exclusive_fp_after_loading = False + self.png.save_rewind() + self.__rewind_idat = self.__prepare_idat + self.__rewind = self._fp.tell() + if self.default_image: + # IDAT chunk contains default image and not first animation frame + self.n_frames += 1 + self._seek(0) + self.is_animated = self.n_frames > 1 + + @property + def text(self): + # experimental + if self._text is None: + # iTxt, tEXt and zTXt chunks may appear at the end of the file + # So load the file to ensure that they are read + if self.is_animated: + frame = self.__frame + # for APNG, seek to the final frame before loading + self.seek(self.n_frames - 1) + self.load() + if self.is_animated: + self.seek(frame) + return self._text + + def verify(self): + """Verify PNG file""" + + if self.fp is None: + raise RuntimeError("verify must be called directly after open") + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + self.png.verify() + self.png.close() + + if self._exclusive_fp: + self.fp.close() + self.fp = None + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0, True) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError as e: + self.seek(last_frame) + raise EOFError("no more images in APNG file") from e + + def _seek(self, frame, rewind=False): + if frame == 0: + if rewind: + self._fp.seek(self.__rewind) + self.png.rewind() + self.__prepare_idat = self.__rewind_idat + self.im = None + if self.pyaccess: + self.pyaccess = None + self.info = self.png.im_info + self.tile = self.png.im_tile + self.fp = self._fp + self._prev_im = None + self.dispose = None + self.default_image = self.info.get("default_image", 
False) + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + self.__frame = 0 + else: + if frame != self.__frame + 1: + raise ValueError(f"cannot seek to frame {frame}") + + # ensure previous frame was loaded + self.load() + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + self._prev_im = self.im.copy() + + self.fp = self._fp + + # advance to the next frame + if self.__prepare_idat: + ImageFile._safe_read(self.fp, self.__prepare_idat) + self.__prepare_idat = 0 + frame_start = False + while True: + self.fp.read(4) # CRC + + try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + break + + if cid == b"IEND": + raise EOFError("No more images in APNG file") + if cid == b"fcTL": + if frame_start: + # there must be at least one fdAT chunk between fcTL chunks + raise SyntaxError("APNG missing frame data") + frame_start = True + + try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + if frame_start: + self.__prepare_idat = length + break + ImageFile._safe_read(self.fp, length) + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + ImageFile._safe_read(self.fp, length) + + self.__frame = frame + self.tile = self.png.im_tile + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + + if not self.tile: + raise EOFError + + # setup frame disposal (actual disposal done when needed in the next _seek()) + if self._prev_im is None and self.dispose_op == Disposal.OP_PREVIOUS: + self.dispose_op = Disposal.OP_BACKGROUND + + if self.dispose_op == Disposal.OP_PREVIOUS: + self.dispose = self._prev_im.copy() + self.dispose = self._crop(self.dispose, self.dispose_extent) + elif self.dispose_op == Disposal.OP_BACKGROUND: + self.dispose = Image.core.fill(self.mode, self.size) + self.dispose = self._crop(self.dispose, self.dispose_extent) + else: + self.dispose = None + + def tell(self): + return self.__frame + + def load_prepare(self): + """internal: prepare to read PNG file""" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + self.__idat = self.__prepare_idat # used by load_read() + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes): + """internal: read more image data""" + + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT", b"fdAT"]: + self.png.push(cid, pos, length) + return b"" + + if cid == b"fdAT": + try: + self.png.call(cid, pos, length) + except EOFError: + pass + self.__idat = length - 4 # sequence_num has already been read + else: + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self): + """internal: finished reading image data""" + if self.__idat != 0: + self.fp.read(self.__idat) + while True: + self.fp.read(4) # CRC + + try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + break + + if cid == b"IEND": + break + elif cid == b"fcTL" and self.is_animated: + # start of the next frame, stop reading + self.__prepare_idat = 0 + self.png.push(cid, pos, length) + break + 
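+ # any other chunk here is dispatched normally (typically trailing
+ # tEXt/zTXt/iTXt metadata), so self.png.im_text is fully populated
+ # by the time IEND is reached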
+ try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + ImageFile._safe_read(self.fp, length) + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + if cid[1:2].islower(): + self.private_chunks.append((cid, s, True)) + self._text = self.png.im_text + if not self.is_animated: + self.png.close() + self.png = None + else: + if self._prev_im and self.blend_op == Blend.OP_OVER: + updated = self._crop(self.im, self.dispose_extent) + self._prev_im.paste( + updated, self.dispose_extent, updated.convert("RGBA") + ) + self.im = self._prev_im + if self.pyaccess: + self.pyaccess = None + + def _getexif(self): + if "exif" not in self.info: + self.load() + if "exif" not in self.info and "Raw profile type exif" not in self.info: + return None + return self.getexif()._get_merged_dict() + + def getexif(self): + if "exif" not in self.info: + self.load() + + return super().getexif() + + def getxmp(self): + """ + Returns a dictionary containing the XMP tags. + Requires defusedxml to be installed. + + :returns: XMP tags in a dictionary. + """ + return ( + self._getxmp(self.info["XML:com.adobe.xmp"]) + if "XML:com.adobe.xmp" in self.info + else {} + ) + + +# -------------------------------------------------------------------- +# PNG writer + +_OUTMODES = { + # supported PIL modes, and corresponding rawmodes/bits/color combinations + "1": ("1", b"\x01\x00"), + "L;1": ("L;1", b"\x01\x00"), + "L;2": ("L;2", b"\x02\x00"), + "L;4": ("L;4", b"\x04\x00"), + "L": ("L", b"\x08\x00"), + "LA": ("LA", b"\x08\x04"), + "I": ("I;16B", b"\x10\x00"), + "I;16": ("I;16B", b"\x10\x00"), + "P;1": ("P;1", b"\x01\x03"), + "P;2": ("P;2", b"\x02\x03"), + "P;4": ("P;4", b"\x04\x03"), + "P": ("P", b"\x08\x03"), + "RGB": ("RGB", b"\x08\x02"), + "RGBA": ("RGBA", b"\x08\x06"), +} + + +def putchunk(fp, cid, *data): + """Write a PNG chunk (including CRC field)""" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + crc = _crc32(data, _crc32(cid)) + fp.write(o32(crc)) + + +class _idat: + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data): + self.chunk(self.fp, b"IDAT", data) + + +class _fdat: + # wrap encoder output in fdAT chunks + + def __init__(self, fp, chunk, seq_num): + self.fp = fp + self.chunk = chunk + self.seq_num = seq_num + + def write(self, data): + self.chunk(self.fp, b"fdAT", o32(self.seq_num), data) + self.seq_num += 1 + + +def _write_multiple_frames(im, fp, chunk, rawmode, default_image, append_images): + duration = im.encoderinfo.get("duration", im.info.get("duration", 0)) + loop = im.encoderinfo.get("loop", im.info.get("loop", 0)) + disposal = im.encoderinfo.get("disposal", im.info.get("disposal", Disposal.OP_NONE)) + blend = im.encoderinfo.get("blend", im.info.get("blend", Blend.OP_SOURCE)) + + if default_image: + chain = itertools.chain(append_images) + else: + chain = itertools.chain([im], append_images) + + im_frames = [] + frame_count = 0 + for im_seq in chain: + for im_frame in ImageSequence.Iterator(im_seq): + if im_frame.mode == rawmode: + im_frame = im_frame.copy() + else: + if rawmode == "P": + im_frame = im_frame.convert(rawmode, palette=im.palette) + else: + im_frame = im_frame.convert(rawmode) + encoderinfo = im.encoderinfo.copy() + if isinstance(duration, (list, tuple)): + encoderinfo["duration"] = duration[frame_count] + if isinstance(disposal, 
(list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] + if isinstance(blend, (list, tuple)): + encoderinfo["blend"] = blend[frame_count] + frame_count += 1 + + if im_frames: + previous = im_frames[-1] + prev_disposal = previous["encoderinfo"].get("disposal") + prev_blend = previous["encoderinfo"].get("blend") + if prev_disposal == Disposal.OP_PREVIOUS and len(im_frames) < 2: + prev_disposal = Disposal.OP_BACKGROUND + + if prev_disposal == Disposal.OP_BACKGROUND: + base_im = previous["im"].copy() + dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0)) + bbox = previous["bbox"] + if bbox: + dispose = dispose.crop(bbox) + else: + bbox = (0, 0) + im.size + base_im.paste(dispose, bbox) + elif prev_disposal == Disposal.OP_PREVIOUS: + base_im = im_frames[-2]["im"] + else: + base_im = previous["im"] + delta = ImageChops.subtract_modulo( + im_frame.convert("RGB"), base_im.convert("RGB") + ) + bbox = delta.getbbox() + if ( + not bbox + and prev_disposal == encoderinfo.get("disposal") + and prev_blend == encoderinfo.get("blend") + ): + if isinstance(duration, (list, tuple)): + previous["encoderinfo"]["duration"] += encoderinfo["duration"] + continue + else: + bbox = None + im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo}) + + # animation control + chunk( + fp, + b"acTL", + o32(len(im_frames)), # 0: num_frames + o32(loop), # 4: num_plays + ) + + # default image IDAT (if it exists) + if default_image: + ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) + + seq_num = 0 + for frame, frame_data in enumerate(im_frames): + im_frame = frame_data["im"] + if not frame_data["bbox"]: + bbox = (0, 0) + im_frame.size + else: + bbox = frame_data["bbox"] + im_frame = im_frame.crop(bbox) + size = im_frame.size + encoderinfo = frame_data["encoderinfo"] + frame_duration = int(round(encoderinfo.get("duration", duration))) + frame_disposal = encoderinfo.get("disposal", disposal) + frame_blend = encoderinfo.get("blend", blend) + # frame control + chunk( + fp, + b"fcTL", + o32(seq_num), # sequence_number + o32(size[0]), # width + o32(size[1]), # height + o32(bbox[0]), # x_offset + o32(bbox[1]), # y_offset + o16(frame_duration), # delay_numerator + o16(1000), # delay_denominator + o8(frame_disposal), # dispose_op + o8(frame_blend), # blend_op + ) + seq_num += 1 + # frame data + if frame == 0 and not default_image: + # first frame must be in IDAT chunks for backwards compatibility + ImageFile._save( + im_frame, + _idat(fp, chunk), + [("zip", (0, 0) + im_frame.size, 0, rawmode)], + ) + else: + fdat_chunks = _fdat(fp, chunk, seq_num) + ImageFile._save( + im_frame, + fdat_chunks, + [("zip", (0, 0) + im_frame.size, 0, rawmode)], + ) + seq_num = fdat_chunks.seq_num + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, chunk=putchunk, save_all=False): + # save an image to disk (called by the save method) + + if save_all: + default_image = im.encoderinfo.get( + "default_image", im.info.get("default_image") + ) + modes = set() + append_images = im.encoderinfo.get("append_images", []) + if default_image: + chain = itertools.chain(append_images) + else: + chain = itertools.chain([im], append_images) + for im_seq in chain: + for im_frame in ImageSequence.Iterator(im_seq): + modes.add(im_frame.mode) + for mode in ("RGBA", "RGB", "P"): + if mode in modes: + break + else: + mode = modes.pop() + else: + mode = im.mode + + if mode == "P": + + # + # attempt to minimize storage requirements for palette images + if 
"bits" in im.encoderinfo: + # number of bits specified by user + colors = min(1 << im.encoderinfo["bits"], 256) + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 1) + else: + colors = 256 + + if colors <= 16: + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + else: + bits = 4 + mode = f"{mode};{bits}" + + # encoder options + im.encoderconfig = ( + im.encoderinfo.get("optimize", False), + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + im.encoderinfo.get("dictionary", b""), + ) + + # get the corresponding PNG mode + try: + rawmode, mode = _OUTMODES[mode] + except KeyError as e: + raise OSError(f"cannot write mode {mode} as PNG") from e + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk( + fp, + b"IHDR", + o32(im.size[0]), # 0: size + o32(im.size[1]), + mode, # 8: depth/type + b"\0", # 10: compression + b"\0", # 11: filter category + b"\0", # 12: interlace flag + ) + + chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"] + + icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(icc) + chunk(fp, b"iCCP", data) + + # You must either have sRGB or iCCP. + # Disallow sRGB chunks when an iCCP-chunk has been emitted. + chunks.remove(b"sRGB") + + info = im.encoderinfo.get("pnginfo") + if info: + chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"] + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + elif cid in chunks_multiple_allowed: + chunk(fp, cid, data) + elif cid[1:2].islower(): + # Private chunk + after_idat = info_chunk[2:3] + if not after_idat: + chunk(fp, cid, data) + + if im.mode == "P": + palette_byte_number = colors * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b"\0" + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = colors + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b"\xFF" * transparency + b"\0" + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode in ("1", "L", "I"): + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. 
+ raise OSError("cannot use transparency for this mode") + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = colors + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk( + fp, + b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b"\x01", + ) + + if info: + chunks = [b"bKGD", b"hIST"] + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + + exif = im.encoderinfo.get("exif", im.info.get("exif")) + if exif: + if isinstance(exif, Image.Exif): + exif = exif.tobytes(8) + if exif.startswith(b"Exif\x00\x00"): + exif = exif[6:] + chunk(fp, b"eXIf", exif) + + if save_all: + _write_multiple_frames(im, fp, chunk, rawmode, default_image, append_images) + else: + ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) + + if info: + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid[1:2].islower(): + # Private chunk + after_idat = info_chunk[2:3] + if after_idat: + chunk(fp, cid, data) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector: + data = [] + + def write(self, data): + pass + + def append(self, chunk): + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + crc = o32(_crc32(data, _crc32(cid))) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) +Image.register_save_all(PngImageFile.format, _save_all) + +Image.register_extensions(PngImageFile.format, [".png", ".apng"]) + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/sbsheriff/Lib/site-packages/PIL/PpmImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PpmImagePlugin.py new file mode 100644 index 0000000..392771d --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PpmImagePlugin.py @@ -0,0 +1,342 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile +from ._binary import i16be as i16 +from ._binary import o8 +from ._binary import o32le as o32 + +# +# -------------------------------------------------------------------- + +b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d" + +MODES = { + # standard + b"P1": "1", + b"P2": "L", + b"P3": "RGB", + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK", +} + + +def _accept(prefix): + return prefix[0:1] == b"P" and prefix[1] in b"0123456y" + + +## +# Image plugin for PBM, PGM, and PPM images. 
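+# A minimal usage sketch; "example.ppm" is a hypothetical file name, and the
+# plugin is selected automatically through the registry calls at the bottom
+# of this module:
+#
+#     from PIL import Image
+#
+#     with Image.open("example.ppm") as im:
+#         print(im.format, im.mode, im.size)  # e.g. "PPM", "RGB", (640, 480)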
+ + +class PpmImageFile(ImageFile.ImageFile): + + format = "PPM" + format_description = "Pbmplus image" + + def _read_magic(self): + magic = b"" + # read until whitespace or longest available magic number + for _ in range(6): + c = self.fp.read(1) + if not c or c in b_whitespace: + break + magic += c + return magic + + def _read_token(self): + token = b"" + while len(token) <= 10: # read until next whitespace or limit of 10 characters + c = self.fp.read(1) + if not c: + break + elif c in b_whitespace: # token ended + if not token: + # skip whitespace at start + continue + break + elif c == b"#": + # ignores rest of the line; stops at CR, LF or EOF + while self.fp.read(1) not in b"\r\n": + pass + continue + token += c + if not token: + # Token was not even 1 byte + raise ValueError("Reached EOF while reading header") + elif len(token) > 10: + raise ValueError(f"Token too long in file header: {token.decode()}") + return token + + def _open(self): + magic_number = self._read_magic() + try: + mode = MODES[magic_number] + except KeyError: + raise SyntaxError("not a PPM file") + + if magic_number in (b"P1", b"P4"): + self.custom_mimetype = "image/x-portable-bitmap" + elif magic_number in (b"P2", b"P5"): + self.custom_mimetype = "image/x-portable-graymap" + elif magic_number in (b"P3", b"P6"): + self.custom_mimetype = "image/x-portable-pixmap" + + maxval = None + decoder_name = "raw" + if magic_number in (b"P1", b"P2", b"P3"): + decoder_name = "ppm_plain" + for ix in range(3): + token = int(self._read_token()) + if ix == 0: # token is the x size + xsize = token + elif ix == 1: # token is the y size + ysize = token + if mode == "1": + self.mode = "1" + rawmode = "1;I" + break + else: + self.mode = rawmode = mode + elif ix == 2: # token is maxval + maxval = token + if not 0 < maxval < 65536: + raise ValueError( + "maxval must be greater than 0 and less than 65536" + ) + if maxval > 255 and mode == "L": + self.mode = "I" + + if decoder_name != "ppm_plain": + # If maxval matches a bit depth, use the raw decoder directly + if maxval == 65535 and mode == "L": + rawmode = "I;16B" + elif maxval != 255: + decoder_name = "ppm" + + args = (rawmode, 0, 1) if decoder_name == "raw" else (rawmode, maxval) + self._size = xsize, ysize + self.tile = [(decoder_name, (0, 0, xsize, ysize), self.fp.tell(), args)] + + +# +# -------------------------------------------------------------------- + + +class PpmPlainDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def _read_block(self): + return self.fd.read(ImageFile.SAFEBLOCK) + + def _find_comment_end(self, block, start=0): + a = block.find(b"\n", start) + b = block.find(b"\r", start) + return min(a, b) if a * b > 0 else max(a, b) # lowest nonnegative index (or -1) + + def _ignore_comments(self, block): + if self._comment_spans: + # Finish current comment + while block: + comment_end = self._find_comment_end(block) + if comment_end != -1: + # Comment ends in this block + # Delete tail of comment + block = block[comment_end + 1 :] + break + else: + # Comment spans whole block + # So read the next block, looking for the end + block = self._read_block() + + # Search for any further comments + self._comment_spans = False + while True: + comment_start = block.find(b"#") + if comment_start == -1: + # No comment found + break + comment_end = self._find_comment_end(block, comment_start) + if comment_end != -1: + # Comment ends in this block + # Delete comment + block = block[:comment_start] + block[comment_end + 1 :] + else: + # Comment continues to next block(s) + block = 
block[:comment_start] + self._comment_spans = True + break + return block + + def _decode_bitonal(self): + """ + This is a separate method because in the plain PBM format, all data tokens are + exactly one byte, so the inter-token whitespace is optional. + """ + data = bytearray() + total_bytes = self.state.xsize * self.state.ysize + + while len(data) != total_bytes: + block = self._read_block() # read next block + if not block: + # eof + break + + block = self._ignore_comments(block) + + tokens = b"".join(block.split()) + for token in tokens: + if token not in (48, 49): + raise ValueError(f"Invalid token for this mode: {bytes([token])}") + data = (data + tokens)[:total_bytes] + invert = bytes.maketrans(b"01", b"\xFF\x00") + return data.translate(invert) + + def _decode_blocks(self, maxval): + data = bytearray() + max_len = 10 + out_byte_count = 4 if self.mode == "I" else 1 + out_max = 65535 if self.mode == "I" else 255 + bands = Image.getmodebands(self.mode) + total_bytes = self.state.xsize * self.state.ysize * bands * out_byte_count + + half_token = False + while len(data) != total_bytes: + block = self._read_block() # read next block + if not block: + if half_token: + block = bytearray(b" ") # flush half_token + else: + # eof + break + + block = self._ignore_comments(block) + + if half_token: + block = half_token + block # stitch half_token to new block + + tokens = block.split() + + if block and not block[-1:].isspace(): # block might split token + half_token = tokens.pop() # save half token for later + if len(half_token) > max_len: # prevent buildup of half_token + raise ValueError( + f"Token too long found in data: {half_token[:max_len + 1]}" + ) + + for token in tokens: + if len(token) > max_len: + raise ValueError( + f"Token too long found in data: {token[:max_len + 1]}" + ) + value = int(token) + if value > maxval: + raise ValueError(f"Channel value too large for this mode: {value}") + value = round(value / maxval * out_max) + data += o32(value) if self.mode == "I" else o8(value) + if len(data) == total_bytes: # finished! 
+ break + return data + + def decode(self, buffer): + self._comment_spans = False + if self.mode == "1": + data = self._decode_bitonal() + rawmode = "1;8" + else: + maxval = self.args[-1] + data = self._decode_blocks(maxval) + rawmode = "I;32" if self.mode == "I" else self.mode + self.set_as_raw(bytes(data), rawmode) + return -1, 0 + + +class PpmDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer): + data = bytearray() + maxval = self.args[-1] + in_byte_count = 1 if maxval < 256 else 2 + out_byte_count = 4 if self.mode == "I" else 1 + out_max = 65535 if self.mode == "I" else 255 + bands = Image.getmodebands(self.mode) + while len(data) < self.state.xsize * self.state.ysize * bands * out_byte_count: + pixels = self.fd.read(in_byte_count * bands) + if len(pixels) < in_byte_count * bands: + # eof + break + for b in range(bands): + value = ( + pixels[b] if in_byte_count == 1 else i16(pixels, b * in_byte_count) + ) + value = min(out_max, round(value / maxval * out_max)) + data += o32(value) if self.mode == "I" else o8(value) + rawmode = "I;32" if self.mode == "I" else self.mode + self.set_as_raw(bytes(data), rawmode) + return -1, 0 + + +# +# -------------------------------------------------------------------- + + +def _save(im, fp, filename): + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + rawmode, head = "I;16B", b"P5" + elif im.mode in ("RGB", "RGBA"): + rawmode, head = "RGB", b"P6" + else: + raise OSError(f"cannot write mode {im.mode} as PPM") + fp.write(head + b"\n%d %d\n" % im.size) + if head == b"P6": + fp.write(b"255\n") + elif head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + else: + fp.write(b"65535\n") + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) + + # ALTERNATIVE: save via builtin debug function + # im._dump(filename) + + +# +# -------------------------------------------------------------------- + + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_decoder("ppm", PpmDecoder) +Image.register_decoder("ppm_plain", PpmPlainDecoder) + +Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm"]) + +Image.register_mime(PpmImageFile.format, "image/x-portable-anymap") diff --git a/sbsheriff/Lib/site-packages/PIL/PsdImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/PsdImagePlugin.py new file mode 100644 index 0000000..bd10e3b --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PsdImagePlugin.py @@ -0,0 +1,305 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import io + +from . 
import Image, ImageFile, ImagePalette +from ._binary import i8 +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import si16be as si16 + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3), +} + + +# --------------------------------------------------------------------. +# read PSD images + + +def _accept(prefix): + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. + + +class PsdImageFile(ImageFile.ImageFile): + + format = "PSD" + format_description = "Adobe Photoshop" + _close_exclusive_fp_after_loading = False + + def _open(self): + + read = self.fp.read + + # + # header + + s = read(26) + if not _accept(s) or i16(s, 4) != 1: + raise SyntaxError("not a PSD file") + + psd_bits = i16(s, 22) + psd_channels = i16(s, 12) + psd_mode = i16(s, 24) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + raise OSError("not enough channels") + if mode == "RGB" and psd_channels == 4: + mode = "RGBA" + channels = 4 + + self.mode = mode + self._size = i32(s, 18), i32(s, 14) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + read(4) # signature + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if len(data) & 1: + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self.layers = [] + + size = i32(read(4)) + if size: + end = self.fp.tell() + size + size = i32(read(4)) + if size: + _layer_data = io.BytesIO(ImageFile._safe_read(self.fp, size)) + self.layers = _layerinfo(_layer_data, size) + self.fp.seek(end) + self.n_frames = len(self.layers) + self.is_animated = self.n_frames > 1 + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self._fp = self.fp + self.frame = 1 + self._min_frame = 1 + + def seek(self, layer): + if not self._seek_check(layer): + return + + # seek to given layer (1..max) + try: + name, mode, bbox, tile = self.layers[layer - 1] + self.mode = mode + self.tile = tile + self.frame = layer + self.fp = self._fp + return name, bbox + except IndexError as e: + raise EOFError("no such layer") from e + + def tell(self): + # return layer number (0=image, 1..max=layers) + return self.frame + + +def _layerinfo(fp, ct_bytes): + # read layerinfo block + layers = [] + + def read(size): + return ImageFile._safe_read(fp, size) + + ct = si16(read(2)) + + # sanity check + if ct_bytes < (abs(ct) * 20): + raise SyntaxError("Layer block too short for number of layers requested") + + for _ in range(abs(ct)): + + # bounding box + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) + + # image info + mode = [] + ct_types = i16(read(2)) + types = list(range(ct_types)) + if len(types) > 4: + continue + + for _ in types: + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + read(4) # size + + # 
figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + read(12) # filler + name = "" + size = i32(read(4)) # length of the extra data field + if size: + data_end = fp.tell() + size + + length = i32(read(4)) + if length: + fp.seek(length - 16, io.SEEK_CUR) + + length = i32(read(4)) + if length: + fp.seek(length, io.SEEK_CUR) + + length = i8(read(1)) + if length: + # Don't know the proper encoding, + # Latin-1 should be a good guess + name = read(length).decode("latin-1", "replace") + + fp.seek(data_end) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + i = 0 + for name, mode, bbox in layers: + tile = [] + for m in mode: + t = _maketile(fp, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + i += 1 + + return layers + + +def _maketile(file, mode, bbox, channels): + + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize * ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("packbits", bbox, offset, layer)) + for y in range(ysize): + offset = offset + i16(bytecount, i) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + + +# -------------------------------------------------------------------- +# registry + + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") + +Image.register_mime(PsdImageFile.format, "image/vnd.adobe.photoshop") diff --git a/sbsheriff/Lib/site-packages/PIL/PyAccess.py b/sbsheriff/Lib/site-packages/PIL/PyAccess.py new file mode 100644 index 0000000..9a2ec48 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/PyAccess.py @@ -0,0 +1,358 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access. +# * Does not implement the line functions, as they don't appear to be used +# * Taking only the tuple form, which is used from python. +# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# + +import logging +import sys + +try: + from cffi import FFI + + defs = """ + struct Pixel_RGBA { + unsigned char r,g,b,a; + }; + struct Pixel_I16 { + unsigned char l,r; + }; + """ + ffi = FFI() + ffi.cdef(defs) +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. 
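+ # DeferredError stores the ImportError and re-raises it on first
+ # attribute access, keeping this module importable without cffi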
+ from ._util import DeferredError + + FFI = ffi = DeferredError(ex) + +logger = logging.getLogger(__name__) + + +class PyAccess: + def __init__(self, img, readonly=False): + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast("unsigned char **", vals["image8"]) + self.image32 = ffi.cast("int **", vals["image32"]) + self.image = ffi.cast("unsigned char **", vals["image"]) + self.xsize, self.ysize = img.im.size + self._img = img + + # Keep pointer to im object to prevent dereferencing. + self._im = img.im + if self._im.mode in ("P", "PA"): + self._palette = img.palette + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self): + pass + + def __setitem__(self, xy, color): + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param color: The pixel value. + """ + if self.readonly: + raise ValueError("Attempt to putpixel a read only image") + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) + + if ( + self._im.mode in ("P", "PA") + and isinstance(color, (list, tuple)) + and len(color) in [3, 4] + ): + # RGB or RGBA value for a P or PA image + if self._im.mode == "PA": + alpha = color[3] if len(color) == 4 else 255 + color = color[:3] + color = self._palette.getcolor(color, self._img) + if self._im.mode == "PA": + color = (color, alpha) + + return self.set_pixel(x, y, color) + + def __getitem__(self, xy): + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. 
+ """ + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy): + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + raise ValueError("pixel location out of range") + return xy + + +class _PyAccess32_2(PyAccess): + """PA, LA, stored in first and last bytes of a 32 bit word""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.r, pixel.a + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """RGB and friends, stored in the first three bytes of a 32 bit word""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.r, pixel.g, pixel.b + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = 255 + + +class _PyAccess32_4(PyAccess): + """RGBA etc, all 4 bytes of a 32 bit word""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.r, pixel.g, pixel.b, pixel.a + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """1, L, P, 8 bit images stored as uint8""" + + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """I;16 access, native bitendian without conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("unsigned short **", self.image) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """I;16L access, with conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF # noqa: E741 + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """I;16B access, with conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except Exception: + color = min(color[0], 65535) + + pixel.l = color >> 8 # noqa: E741 + pixel.r = color & 0xFF + + +class 
_PyAccessI32_N(PyAccess): + """Signed Int32 access, native endian""" + + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """I;32L/B access, with byteswapping conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new("int *", i) + chars = ffi.cast("unsigned char *", orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0] + return ffi.cast("int *", chars)[0] + + def get_pixel(self, x, y): + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """32 bit float access""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("float **", self.image32) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except TypeError: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = { + "1": _PyAccess8, + "L": _PyAccess8, + "P": _PyAccess8, + "LA": _PyAccess32_2, + "La": _PyAccess32_2, + "PA": _PyAccess32_2, + "RGB": _PyAccess32_3, + "LAB": _PyAccess32_3, + "HSV": _PyAccess32_3, + "YCbCr": _PyAccess32_3, + "RGBA": _PyAccess32_4, + "RGBa": _PyAccess32_4, + "RGBX": _PyAccess32_4, + "CMYK": _PyAccess32_4, + "F": _PyAccessF, + "I": _PyAccessI32_N, +} + +if sys.byteorder == "little": + mode_map["I;16"] = _PyAccessI16_N + mode_map["I;16L"] = _PyAccessI16_N + mode_map["I;16B"] = _PyAccessI16_B + + mode_map["I;32L"] = _PyAccessI32_N + mode_map["I;32B"] = _PyAccessI32_Swap +else: + mode_map["I;16"] = _PyAccessI16_L + mode_map["I;16L"] = _PyAccessI16_L + mode_map["I;16B"] = _PyAccessI16_N + + mode_map["I;32L"] = _PyAccessI32_Swap + mode_map["I;32B"] = _PyAccessI32_N + + +def new(img, readonly=False): + access_type = mode_map.get(img.mode, None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) diff --git a/sbsheriff/Lib/site-packages/PIL/SgiImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/SgiImagePlugin.py new file mode 100644 index 0000000..f0207bb --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/SgiImagePlugin.py @@ -0,0 +1,230 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. +# +# +# +# History: +# 2017-07-22 mb Add RLE decompression +# 2016-10-16 mb Add save method without compression +# 1995-09-10 fl Created +# +# Copyright (c) 2016 by Mickael Bonfill. +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +import os +import struct + +from . import Image, ImageFile +from ._binary import i16be as i16 +from ._binary import o8 + + +def _accept(prefix): + return len(prefix) >= 2 and i16(prefix) == 474 + + +MODES = { + (1, 1, 1): "L", + (1, 2, 1): "L", + (2, 1, 1): "L;16B", + (2, 2, 1): "L;16B", + (1, 3, 3): "RGB", + (2, 3, 3): "RGB;16B", + (1, 3, 4): "RGBA", + (2, 3, 4): "RGBA;16B", +} + + +## +# Image plugin for SGI images.
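+# A minimal usage sketch; "example.sgi" is a hypothetical file name (SGI
+# files are recognised by the magic number 474 checked in _accept above):
+#
+#     from PIL import Image
+#
+#     with Image.open("example.sgi") as im:
+#         print(im.format, im.mode)  # "SGI", then "L", "RGB" or "RGBA"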
+class SgiImageFile(ImageFile.ImageFile): + + format = "SGI" + format_description = "SGI Image File Format" + + def _open(self): + + # HEAD + headlen = 512 + s = self.fp.read(headlen) + + if not _accept(s): + raise ValueError("Not an SGI image file") + + # compression : verbatim or RLE + compression = s[2] + + # bpc : 1 or 2 bytes (8bits or 16bits) + bpc = s[3] + + # dimension : 1, 2 or 3 (depending on xsize, ysize and zsize) + dimension = i16(s, 4) + + # xsize : width + xsize = i16(s, 6) + + # ysize : height + ysize = i16(s, 8) + + # zsize : channels count + zsize = i16(s, 10) + + # layout + layout = bpc, dimension, zsize + + # determine mode from bits/zsize + rawmode = "" + try: + rawmode = MODES[layout] + except KeyError: + pass + + if rawmode == "": + raise ValueError("Unsupported SGI image mode") + + self._size = xsize, ysize + self.mode = rawmode.split(";")[0] + if self.mode == "RGB": + self.custom_mimetype = "image/rgb" + + # orientation -1 : scanlines begins at the bottom-left corner + orientation = -1 + + # decoder info + if compression == 0: + pagesize = xsize * ysize * bpc + if bpc == 2: + self.tile = [ + ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation)) + ] + else: + self.tile = [] + offset = headlen + for layer in self.mode: + self.tile.append( + ("raw", (0, 0) + self.size, offset, (layer, 0, orientation)) + ) + offset += pagesize + elif compression == 1: + self.tile = [ + ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc)) + ] + + +def _save(im, fp, filename): + if im.mode != "RGB" and im.mode != "RGBA" and im.mode != "L": + raise ValueError("Unsupported SGI image mode") + + # Get the keyword arguments + info = im.encoderinfo + + # Byte-per-pixel precision, 1 = 8bits per pixel + bpc = info.get("bpc", 1) + + if bpc not in (1, 2): + raise ValueError("Unsupported number of bytes per pixel") + + # Flip the image, since the origin of SGI file is the bottom-left corner + orientation = -1 + # Define the file as SGI File Format + magic_number = 474 + # Run-Length Encoding Compression - Unsupported at this time + rle = 0 + + # Number of dimensions (x,y,z) + dim = 3 + # X Dimension = width / Y Dimension = height + x, y = im.size + if im.mode == "L" and y == 1: + dim = 1 + elif im.mode == "L": + dim = 2 + # Z Dimension: Number of channels + z = len(im.mode) + + if dim == 1 or dim == 2: + z = 1 + + # assert we've got the right number of bands. 
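+ # e.g. an "RGB" image reports bands ("R", "G", "B"), so z must be 3 here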
+ if len(im.getbands()) != z: + raise ValueError( + f"incorrect number of bands in SGI write: {z} vs {len(im.getbands())}" + ) + + # Minimum Byte value + pinmin = 0 + # Maximum Byte value (255 = 8bits per pixel) + pinmax = 255 + # Image name (79 characters max, truncated below in write) + img_name = os.path.splitext(os.path.basename(filename))[0] + img_name = img_name.encode("ascii", "ignore") + # Standard representation of pixel in the file + colormap = 0 + fp.write(struct.pack(">h", magic_number)) + fp.write(o8(rle)) + fp.write(o8(bpc)) + fp.write(struct.pack(">H", dim)) + fp.write(struct.pack(">H", x)) + fp.write(struct.pack(">H", y)) + fp.write(struct.pack(">H", z)) + fp.write(struct.pack(">l", pinmin)) + fp.write(struct.pack(">l", pinmax)) + fp.write(struct.pack("4s", b"")) # dummy + fp.write(struct.pack("79s", img_name)) # truncates to 79 chars + fp.write(struct.pack("s", b"")) # force null byte after img_name + fp.write(struct.pack(">l", colormap)) + fp.write(struct.pack("404s", b"")) # dummy + + rawmode = "L" + if bpc == 2: + rawmode = "L;16B" + + for channel in im.split(): + fp.write(channel.tobytes("raw", rawmode, 0, orientation)) + + if hasattr(fp, "flush"): + fp.flush() + + +class SGI16Decoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer): + rawmode, stride, orientation = self.args + pagesize = self.state.xsize * self.state.ysize + zsize = len(self.mode) + self.fd.seek(512) + + for band in range(zsize): + channel = Image.new("L", (self.state.xsize, self.state.ysize)) + channel.frombytes( + self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation + ) + self.im.putband(channel.im, band) + + return -1, 0 + + +# +# registry + + +Image.register_decoder("SGI16", SGI16Decoder) +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) +Image.register_save(SgiImageFile.format, _save) +Image.register_mime(SgiImageFile.format, "image/sgi") + +Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"]) + +# End of file diff --git a/sbsheriff/Lib/site-packages/PIL/SpiderImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/SpiderImagePlugin.py new file mode 100644 index 0000000..acafc32 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/SpiderImagePlugin.py @@ -0,0 +1,313 @@ +# +# The Python Imaging Library. +# +# SPIDER image file handling +# +# History: +# 2004-08-02 Created BB +# 2006-03-02 added save method +# 2006-03-13 added support for stack images +# +# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. +# Copyright (c) 2004 by William Baxter. +# Copyright (c) 2004 by Secret Labs AB. +# Copyright (c) 2004 by Fredrik Lundh. +# + +## +# Image plugin for the Spider image format. This format is used +# by the SPIDER software, in processing image data from electron +# microscopy and tomography. +## + +# +# SpiderImagePlugin.py +# +# The Spider image format is used by SPIDER software, in processing +# image data from electron microscopy and tomography. 
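+#
+# A minimal usage sketch; "stack.spi" is a hypothetical file name and is
+# assumed to be a stack file (istack > 0), whose images are exposed through
+# the seek/tell frame interface implemented below:
+#
+#     from PIL import Image
+#
+#     with Image.open("stack.spi") as im:  # mode is always "F" (32-bit float)
+#         for frame in range(im.n_frames):
+#             im.seek(frame)
+#             print(frame, im.getextrema())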
+# +# Spider home page: +# https://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# +import os +import struct +import sys + +from PIL import Image, ImageFile + + +def isInt(f): + try: + i = int(f) + if f - i == 0: + return 1 + else: + return 0 + except (ValueError, OverflowError): + return 0 + + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no. of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + + +def isSpiderHeader(t): + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. of bytes in header + lenbyt = int(h[23]) # record length in bytes + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename): + with open(filename, "rb") as fp: + f = fp.read(92) # read 23 * 4 bytes + t = struct.unpack(">23f", f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack("<23f", f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + + format = "SPIDER" + format_description = "Spider 2D image" + _close_exclusive_fp_after_loading = False + + def _open(self): + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack(">27f", f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack("<27f", f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + raise SyntaxError("not a valid Spider file") + except struct.error as e: + raise SyntaxError("not a valid Spider file") from e + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + raise SyntaxError("not a Spider 2D image") + + self._size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + raise SyntaxError("inconsistent stack header values") + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self.mode = "F" + + self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))] + self._fp = self.fp # FIXME: hack + + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self): + if 
self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame): + if self.istack == 0: + raise EOFError("attempt to seek in a non-stack file") + if not self._seek_check(frame): + return + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self._fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth=255): + (minimum, maximum) = self.getextrema() + m = 1 + if maximum != minimum: + m = depth / (maximum - minimum) + b = -m * minimum + return self.point(lambda i, m=m, b=b: i * m + b).convert("L") + + # returns an ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self): + from PIL import ImageTk + + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + +# -------------------------------------------------------------------- +# Image series + +# given a list of filenames, return a list of images +def loadImageSeries(filelist=None): + """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage""" + if filelist is None or len(filelist) < 1: + return + + imglist = [] + for img in filelist: + if not os.path.exists(img): + print(f"unable to find {img}") + continue + try: + with Image.open(img) as im: + im = im.convert2byte() + except Exception: + if not isSpiderImage(img): + print(img + " is not a Spider image file") + continue + im.info["filename"] = img + imglist.append(im) + return imglist + + +# -------------------------------------------------------------------- +# For saving images in Spider format + + +def makeSpiderHeader(im): + nsam, nrow = im.size + lenbyt = nsam * 4 # There are labrec records in the header + labrec = int(1024 / lenbyt) + if 1024 % lenbyt != 0: + labrec += 1 + labbyt = labrec * lenbyt + nvalues = int(labbyt / 4) + if nvalues < 23: + return [] + + hdr = [] + for i in range(nvalues): + hdr.append(0.0) + + # NB these are Fortran indices + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[3] = float(nrow) # number of records in the image + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line + hdr[13] = float(labrec) # number of records in file header + hdr[22] = float(labbyt) # total number of bytes in header + hdr[23] = float(lenbyt) # record length in bytes + + # adjust for Fortran indexing + hdr = hdr[1:] + hdr.append(0.0) + # pack binary data into a string + return [struct.pack("f", v) for v in hdr] + + +def _save(im, fp, filename): + if im.mode[0] != "F": + im = im.convert("F") + + hdr = makeSpiderHeader(im) + if len(hdr) < 256: + raise OSError("Error creating Spider header") + + # write the SPIDER header + fp.writelines(hdr) + + rawmode = "F;32NF" # 32-bit native floating point + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) + + +def _save_spider(im, fp, filename): + # get the filename extension and register it with Image + ext = os.path.splitext(filename)[1] + Image.register_extension(SpiderImageFile.format, ext) + _save(im, fp, filename) + + +# -------------------------------------------------------------------- + + +Image.register_open(SpiderImageFile.format, SpiderImageFile) +Image.register_save(SpiderImageFile.format, _save_spider) + +if __name__ == "__main__": + + if len(sys.argv) < 2: + print("Syntax: python3 SpiderImagePlugin.py [infile] [outfile]") + sys.exit() + + filename = sys.argv[1] + if not isSpiderImage(filename): + print("input image must be in Spider format") +
sys.exit() + + with Image.open(filename) as im: + print("image: " + str(im)) + print("format: " + str(im.format)) + print("size: " + str(im.size)) + print("mode: " + str(im.mode)) + print("max, min: ", end=" ") + print(im.getextrema()) + + if len(sys.argv) > 2: + outfile = sys.argv[2] + + # perform some image operation + im = im.transpose(Image.Transpose.FLIP_LEFT_RIGHT) + print( + f"saving a flipped version of {os.path.basename(filename)} " + f"as {outfile} " + ) + im.save(outfile, SpiderImageFile.format) diff --git a/sbsheriff/Lib/site-packages/PIL/SunImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/SunImagePlugin.py new file mode 100644 index 0000000..c03759a --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/SunImagePlugin.py @@ -0,0 +1,136 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile, ImagePalette +from ._binary import i32be as i32 + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == 0x59A66A95 + + +## +# Image plugin for Sun raster files. + + +class SunImageFile(ImageFile.ImageFile): + + format = "SUN" + format_description = "Sun Raster File" + + def _open(self): + + # The Sun Raster file header is 32 bytes in length + # and has the following format: + + # typedef struct _SunRaster + # { + # DWORD MagicNumber; /* Magic (identification) number */ + # DWORD Width; /* Width of image in pixels */ + # DWORD Height; /* Height of image in pixels */ + # DWORD Depth; /* Number of bits per pixel */ + # DWORD Length; /* Size of image data in bytes */ + # DWORD Type; /* Type of raster file */ + # DWORD ColorMapType; /* Type of color map */ + # DWORD ColorMapLength; /* Size of the color map in bytes */ + # } SUNRASTER; + + # HEAD + s = self.fp.read(32) + if not _accept(s): + raise SyntaxError("not an SUN raster file") + + offset = 32 + + self._size = i32(s, 4), i32(s, 8) + + depth = i32(s, 12) + # data_length = i32(s, 16) # unreliable, ignore. + file_type = i32(s, 20) + palette_type = i32(s, 24) # 0: None, 1: RGB, 2: Raw/arbitrary + palette_length = i32(s, 28) + + if depth == 1: + self.mode, rawmode = "1", "1;I" + elif depth == 4: + self.mode, rawmode = "L", "L;4" + elif depth == 8: + self.mode = rawmode = "L" + elif depth == 24: + if file_type == 3: + self.mode, rawmode = "RGB", "RGB" + else: + self.mode, rawmode = "RGB", "BGR" + elif depth == 32: + if file_type == 3: + self.mode, rawmode = "RGB", "RGBX" + else: + self.mode, rawmode = "RGB", "BGRX" + else: + raise SyntaxError("Unsupported Mode/Bit Depth") + + if palette_length: + if palette_length > 1024: + raise SyntaxError("Unsupported Color Palette Length") + + if palette_type != 1: + raise SyntaxError("Unsupported Palette Type") + + offset = offset + palette_length + self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length)) + if self.mode == "L": + self.mode = "P" + rawmode = rawmode.replace("L", "P") + + # 16 bit boundaries on stride + stride = ((self.size[0] * depth + 15) // 16) * 2 + + # file type: Type is the version (or flavor) of the bitmap + # file. 
The following values are typically found in the Type + # field: + # 0000h Old + # 0001h Standard + # 0002h Byte-encoded + # 0003h RGB format + # 0004h TIFF format + # 0005h IFF format + # FFFFh Experimental + + # Old and standard are the same, except for the length tag. + # byte-encoded is run-length-encoded + # RGB looks similar to standard, but RGB byte order + # TIFF and IFF mean that they were converted from T/IFF + # Experimental means that it's something else. + # (https://www.fileformat.info/format/sunraster/egff.htm) + + if file_type in (0, 1, 3, 4, 5): + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))] + elif file_type == 2: + self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)] + else: + raise SyntaxError("Unsupported Sun Raster file type") + + +# +# registry + + +Image.register_open(SunImageFile.format, SunImageFile, _accept) + +Image.register_extension(SunImageFile.format, ".ras") diff --git a/sbsheriff/Lib/site-packages/PIL/TarIO.py b/sbsheriff/Lib/site-packages/PIL/TarIO.py new file mode 100644 index 0000000..d108362 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/TarIO.py @@ -0,0 +1,65 @@ +# +# The Python Imaging Library. +# $Id$ +# +# read files from within a tar file +# +# History: +# 95-06-18 fl Created +# 96-05-28 fl Open files in binary mode +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-96. +# +# See the README file for information on usage and redistribution. +# + +import io + +from . import ContainerIO + + +class TarIO(ContainerIO.ContainerIO): + """A file object that provides read access to a given member of a TAR file.""" + + def __init__(self, tarfile, file): + """ + Create file object. + + :param tarfile: Name of TAR file. + :param file: Name of member file. + """ + self.fh = open(tarfile, "rb") + + while True: + + s = self.fh.read(512) + if len(s) != 512: + raise OSError("unexpected end of tar file") + + name = s[:100].decode("utf-8") + i = name.find("\0") + if i == 0: + raise OSError("cannot find subfile") + if i > 0: + name = name[:i] + + size = int(s[124:135], 8) + + if file == name: + break + + self.fh.seek((size + 511) & (~511), io.SEEK_CUR) + + # Open region + super().__init__(self.fh, self.fh.tell(), size) + + # Context manager support + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + self.fh.close() diff --git a/sbsheriff/Lib/site-packages/PIL/TgaImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/TgaImagePlugin.py new file mode 100644 index 0000000..cd454b7 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/TgaImagePlugin.py @@ -0,0 +1,254 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TGA file handling +# +# History: +# 95-09-01 fl created (reads 24-bit files only) +# 97-01-04 fl support more TGA versions, including compressed images +# 98-07-04 fl fixed orientation and alpha layer bugs +# 98-09-11 fl fixed orientation for runlength decoder +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +import warnings + +from . 
import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +# +# -------------------------------------------------------------------- +# Read RGA file + + +MODES = { + # map imagetype/depth to rawmode + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", + (3, 16): "LA", + (2, 16): "BGR;5", + (2, 24): "BGR", + (2, 32): "BGRA", +} + + +## +# Image plugin for Targa files. + + +class TgaImageFile(ImageFile.ImageFile): + + format = "TGA" + format_description = "Targa" + + def _open(self): + + # process header + s = self.fp.read(18) + + id_len = s[0] + + colormaptype = s[1] + imagetype = s[2] + + depth = s[16] + + flags = s[17] + + self._size = i16(s, 12), i16(s, 14) + + # validate header fields + if ( + colormaptype not in (0, 1) + or self.size[0] <= 0 + or self.size[1] <= 0 + or depth not in (1, 8, 16, 24, 32) + ): + raise SyntaxError("not a TGA file") + + # image mode + if imagetype in (3, 11): + self.mode = "L" + if depth == 1: + self.mode = "1" # ??? + elif depth == 16: + self.mode = "LA" + elif imagetype in (1, 9): + self.mode = "P" + elif imagetype in (2, 10): + self.mode = "RGB" + if depth == 32: + self.mode = "RGBA" + else: + raise SyntaxError("unknown TGA mode") + + # orientation + orientation = flags & 0x30 + self._flip_horizontally = orientation in [0x10, 0x30] + if orientation in [0x20, 0x30]: + orientation = 1 + elif orientation in [0, 0x10]: + orientation = -1 + else: + raise SyntaxError("unknown TGA orientation") + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if id_len: + self.info["id_section"] = self.fp.read(id_len) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s, 3), i16(s, 5), s[7] + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGR;15", b"\0" * 2 * start + self.fp.read(2 * size) + ) + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", b"\0" * 3 * start + self.fp.read(3 * size) + ) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", b"\0" * 4 * start + self.fp.read(4 * size) + ) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [ + ( + "tga_rle", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, orientation, depth), + ) + ] + else: + self.tile = [ + ( + "raw", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, 0, orientation), + ) + ] + except KeyError: + pass # cannot decode + + def load_end(self): + if self._flip_horizontally: + self.im = self.im.transpose(Image.Transpose.FLIP_LEFT_RIGHT) + + +# +# -------------------------------------------------------------------- +# Write TGA file + + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "LA": ("LA", 16, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im, fp, filename): + + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError as e: + raise OSError(f"cannot write mode {im.mode} as TGA") from e + + if "rle" in im.encoderinfo: + rle = im.encoderinfo["rle"] + else: + compression = im.encoderinfo.get("compression", im.info.get("compression")) + rle = compression == "tga_rle" + if rle: + imagetype += 8 + + id_section = im.encoderinfo.get("id_section", im.info.get("id_section", "")) + id_len = len(id_section) + if id_len > 255: + id_len = 255 + id_section = id_section[:255] + warnings.warn("id_section has been trimmed to 255 characters") + + if colormaptype: + palette = 
im.im.getpalette("RGB", "BGR") + colormaplength, colormapentry = len(palette) // 3, 24 + else: + colormaplength, colormapentry = 0, 0 + + if im.mode in ("LA", "RGBA"): + flags = 8 + else: + flags = 0 + + orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1)) + if orientation > 0: + flags = flags | 0x20 + + fp.write( + o8(id_len) + + o8(colormaptype) + + o8(imagetype) + + o16(0) # colormapfirst + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags) + ) + + if id_section: + fp.write(id_section) + + if colormaptype: + fp.write(palette) + + if rle: + ImageFile._save( + im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))] + ) + else: + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))] + ) + + # write targa version 2 footer + fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000") + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"]) + +Image.register_mime(TgaImageFile.format, "image/x-tga") diff --git a/sbsheriff/Lib/site-packages/PIL/TiffImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..1dfd527 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/TiffImagePlugin.py @@ -0,0 +1,2161 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +import io +import itertools +import logging +import math +import os +import struct +import warnings +from collections.abc import MutableMapping +from fractions import Fraction +from numbers import Number, Rational + +from . 
import Image, ImageFile, ImageOps, ImagePalette, TiffTags +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from .TiffTags import TYPES + +logger = logging.getLogger(__name__) + +# Set these to true to force use of libtiff for reading or writing. +READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True +STRIP_SIZE = 65536 + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +TRANSFERFUNCTION = 301 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEWIDTH = 322 +TILELENGTH = 323 +TILEOFFSETS = 324 +TILEBYTECOUNTS = 325 +SUBIFD = 330 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +YCBCRSUBSAMPLING = 530 +REFERENCEBLACKWHITE = 532 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 +JPEGQUALITY = 65537 # pseudo-tag by libtiff + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", + 34925: "lzma", + 50000: "zstd", + 50001: "webp", +} + +COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()} + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (II, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, 
(1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + (II, 0, (1,), 1, (16,), ()): ("I;16", "I;16"), + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (1,), 2, (16,), ()): ("I;16", "I;16R"), + (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"), + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"), + (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"), + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", 
"P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"), + # JPEG compressed images handled by LibTiff and auto-converted to RGBX + # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel + (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO.keys()) + +PREFIXES = [ + b"MM\x00\x2A", # Valid TIFF header with big-endian byte order + b"II\x2A\x00", # Valid TIFF header with little-endian byte order + b"MM\x2A\x00", # Invalid TIFF header, assume big-endian + b"II\x00\x2A", # Invalid TIFF header, assume little-endian + b"MM\x00\x2B", # BigTIFF with big-endian byte order + b"II\x2B\x00", # BigTIFF with little-endian byte order +] + + +def _accept(prefix): + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + + +def _limit_signed_rational(val, max_val, min_val): + frac = Fraction(val) + n_d = frac.numerator, frac.denominator + + if min(n_d) < min_val: + n_d = _limit_rational(val, abs(min_val)) + + if max(n_d) > max_val: + val = Fraction(*n_d) + n_d = _limit_rational(val, max_val) + + return n_d + + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + + +class IFDRational(Rational): + """Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). 
Delegate as appropriate + + """ + + __slots__ = ("_numerator", "_denominator", "_val") + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + if isinstance(value, IFDRational): + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value._val + return + + if isinstance(value, Fraction): + self._numerator = value.numerator + self._denominator = value.denominator + else: + self._numerator = value + self._denominator = denominator + + if denominator == 0: + self._val = float("nan") + elif denominator == 1: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(self): + return self._numerator + + @property + def denominator(self): + return self._denominator + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return self.numerator, self.denominator + + f = self._val.limit_denominator(max_denominator) + return f.numerator, f.denominator + + def __repr__(self): + return str(float(self._val)) + + def __hash__(self): + return self._val.__hash__() + + def __eq__(self, other): + val = self._val + if isinstance(other, IFDRational): + other = other._val + if isinstance(other, float): + val = float(val) + return val == other + + def __getstate__(self): + return [self._val, self._numerator, self._denominator] + + def __setstate__(self, state): + IFDRational.__init__(self, 0) + _val, _numerator, _denominator = state + self._val = _val + self._numerator = _numerator + self._denominator = _denominator + + def _delegate(op): + def delegate(self, *args): + return getattr(self._val, op)(*args) + + return delegate + + """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', 'rfloordiv', + 'mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool', + 'ceil', 'floor', 'round'] + print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a)) + """ + + __add__ = _delegate("__add__") + __radd__ = _delegate("__radd__") + __sub__ = _delegate("__sub__") + __rsub__ = _delegate("__rsub__") + __mul__ = _delegate("__mul__") + __rmul__ = _delegate("__rmul__") + __truediv__ = _delegate("__truediv__") + __rtruediv__ = _delegate("__rtruediv__") + __floordiv__ = _delegate("__floordiv__") + __rfloordiv__ = _delegate("__rfloordiv__") + __mod__ = _delegate("__mod__") + __rmod__ = _delegate("__rmod__") + __pow__ = _delegate("__pow__") + __rpow__ = _delegate("__rpow__") + __pos__ = _delegate("__pos__") + __neg__ = _delegate("__neg__") + __abs__ = _delegate("__abs__") + __trunc__ = _delegate("__trunc__") + __lt__ = _delegate("__lt__") + __gt__ = _delegate("__gt__") + __le__ = _delegate("__le__") + __ge__ = _delegate("__ge__") + __bool__ = _delegate("__bool__") + __ceil__ = _delegate("__ceil__") + __floor__ = _delegate("__floor__") + __round__ = _delegate("__round__") + + +class ImageFileDirectory_v2(MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. 
+ + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = TiffTags.ASCII + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. + + The tiff metadata type of each item is stored in a dictionary of + tag types in + :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * ``self.tagtype = {}`` + + * Key: numerical TIFF tag number + * Value: integer corresponding to the data type from + :py:data:`.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + + 'Internal' data structures: + + * ``self._tags_v2 = {}`` + + * Key: numerical TIFF tag number + * Value: decoded data, as tuple for multiple values + + * ``self._tagdata = {}`` + + * Key: numerical TIFF tag number + * Value: undecoded byte string from file + + * ``self._tags_v1 = {}`` + + * Key: numerical TIFF tag number + * Value: decoded data in the v1 format + + Tags will be found in the private attributes ``self._tagdata``, and in + ``self._tags_v2`` once decoded. + + ``self.legacy_api`` is a value for internal use, and shouldn't be changed + from outside code. In cooperation with + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api`` + is true, then decoded tags will be populated into both ``_tags_v1`` and + ``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF + save routine. Tags should be read from ``_tags_v1`` if + ``legacy_api == true``. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None, group=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. + """ + if not _accept(ifh): + raise SyntaxError(f"not a TIFF file (header {repr(ifh)} not valid)") + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self._bigtiff = ifh[2] == 43 + self.group = group + self.tagtype = {} + """ Dictionary of tag types """ + self.reset() + (self.next,) = ( + self._unpack("Q", ifh[8:]) if self._bigtiff else self._unpack("L", ifh[4:]) + ) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. 
+ """ + return { + TiffTags.lookup(code, self.group).name: value + for code, value in self.items() + } + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = (val,) + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + + info = TiffTags.lookup(tag, self.group) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = TiffTags.UNDEFINED + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = ( + TiffTags.RATIONAL + if all(v >= 0 for v in values) + else TiffTags.SIGNED_RATIONAL + ) + elif all(isinstance(v, int) for v in values): + if all(0 <= v < 2**16 for v in values): + self.tagtype[tag] = TiffTags.SHORT + elif all(-(2**15) < v < 2**15 for v in values): + self.tagtype[tag] = TiffTags.SIGNED_SHORT + else: + self.tagtype[tag] = ( + TiffTags.LONG + if all(v >= 0 for v in values) + else TiffTags.SIGNED_LONG + ) + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = TiffTags.DOUBLE + elif all(isinstance(v, str) for v in values): + self.tagtype[tag] = TiffTags.ASCII + elif all(isinstance(v, bytes) for v in values): + self.tagtype[tag] = TiffTags.BYTE + + if self.tagtype[tag] == TiffTags.UNDEFINED: + values = [ + v.encode("ascii", "replace") if isinstance(v, str) else v + for v in values + ] + elif self.tagtype[tag] == TiffTags.RATIONAL: + values = [float(v) if isinstance(v, int) else v for v in values] + + is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict) + if not is_ifd: + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + # Three branches: + # Spec'd length == 1, Actual length 1, store as element + # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed. + # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple. + # Don't mess with the legacy api, since it's frozen. + if not is_ifd and ( + (info.length == 1) + or self.tagtype[tag] == TiffTags.BYTE + or (info.length is None and len(values) == 1 and not legacy_api) + ): + # Don't mess with the legacy api, since it's frozen. 
+ if legacy_api and self.tagtype[tag] in [ + TiffTags.RATIONAL, + TiffTags.SIGNED_RATIONAL, + ]: # rationals + values = (values,) + try: + (dest[tag],) = values + except ValueError: + # We've got a builtin tag with 1 expected entry + warnings.warn( + f"Metadata Warning, tag {tag} had too many entries: " + f"{len(values)}, expected 1" + ) + dest[tag] = values[0] + + else: + # Spec'd length > 1 or undefined + # Unspec'd, and length > 1 + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from .TiffTags import TYPES + + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func # noqa: F821 + return func + + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func # noqa: F821 + return func + + return decorator + + def _register_basic(idx_fmt_name): + from .TiffTags import TYPES + + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = ( # noqa: F821 + size, + lambda self, data, legacy_api=True: ( + self._unpack(f"{len(data) // size}{fmt}", data) + ), + ) + _write_dispatch[idx] = lambda self, *values: ( # noqa: F821 + b"".join(self._pack(fmt, value) for value in values) + ) + + list( + map( + _register_basic, + [ + (TiffTags.SHORT, "H", "short"), + (TiffTags.LONG, "L", "long"), + (TiffTags.SIGNED_BYTE, "b", "signed byte"), + (TiffTags.SIGNED_SHORT, "h", "signed short"), + (TiffTags.SIGNED_LONG, "l", "signed long"), + (TiffTags.FLOAT, "f", "float"), + (TiffTags.DOUBLE, "d", "double"), + (TiffTags.IFD, "L", "long"), + (TiffTags.LONG8, "Q", "long8"), + ], + ) + ) + + @_register_loader(1, 1) # Basic type, except for the legacy API. + def load_byte(self, data, legacy_api=True): + return data + + @_register_writer(1) # Basic type, except for the legacy API. 
+ def write_byte(self, data): + return data + + @_register_loader(2, 1) + def load_string(self, data, legacy_api=True): + if data.endswith(b"\0"): + data = data[:-1] + return data.decode("latin-1", "replace") + + @_register_writer(2) + def write_string(self, value): + # remerge of https://github.com/python-pillow/Pillow/pull/1416 + if not isinstance(value, bytes): + value = value.encode("ascii", "replace") + return value + b"\0" + + @_register_loader(5, 8) + def load_rational(self, data, legacy_api=True): + vals = self._unpack(f"{len(data) // 4}L", data) + + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(5) + def write_rational(self, *values): + return b"".join( + self._pack("2L", *_limit_rational(frac, 2**32 - 1)) for frac in values + ) + + @_register_loader(7, 1) + def load_undefined(self, data, legacy_api=True): + return data + + @_register_writer(7) + def write_undefined(self, value): + return value + + @_register_loader(10, 8) + def load_signed_rational(self, data, legacy_api=True): + vals = self._unpack(f"{len(data) // 4}l", data) + + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(10) + def write_signed_rational(self, *values): + return b"".join( + self._pack("2l", *_limit_signed_rational(frac, 2**31 - 1, -(2**31))) + for frac in values + ) + + def _ensure_read(self, fp, size): + ret = fp.read(size) + if len(ret) != size: + raise OSError( + "Corrupt EXIF data. " + f"Expecting to read {size} bytes but only got {len(ret)}. " + ) + return ret + + def load(self, fp): + + self.reset() + self._offset = fp.tell() + + try: + tag_count = ( + self._unpack("Q", self._ensure_read(fp, 8)) + if self._bigtiff + else self._unpack("H", self._ensure_read(fp, 2)) + )[0] + for i in range(tag_count): + tag, typ, count, data = ( + self._unpack("HHQ8s", self._ensure_read(fp, 20)) + if self._bigtiff + else self._unpack("HHL4s", self._ensure_read(fp, 12)) + ) + + tagname = TiffTags.lookup(tag, self.group).name + typname = TYPES.get(typ, "unknown") + msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})" + + try: + unit_size, handler = self._load_dispatch[typ] + except KeyError: + logger.debug(msg + f" - unsupported type {typ}") + continue # ignore unsupported type + size = count * unit_size + if size > (8 if self._bigtiff else 4): + here = fp.tell() + (offset,) = self._unpack("Q" if self._bigtiff else "L", data) + msg += f" Tag Location: {here} - Data Location: {offset}" + fp.seek(offset) + data = ImageFile._safe_read(fp, size) + fp.seek(here) + else: + data = data[:size] + + if len(data) != size: + warnings.warn( + "Possibly corrupt EXIF data. " + f"Expecting to read {size} bytes but only got {len(data)}." + f" Skipping tag {tag}" + ) + logger.debug(msg) + continue + + if not data: + logger.debug(msg) + continue + + self._tagdata[tag] = data + self.tagtype[tag] = typ + + msg += " - value: " + ( + "" % size if size > 32 else repr(data) + ) + logger.debug(msg) + + (self.next,) = ( + self._unpack("Q", self._ensure_read(fp, 8)) + if self._bigtiff + else self._unpack("L", self._ensure_read(fp, 4)) + ) + except OSError as msg: + warnings.warn(str(msg)) + return + + def tobytes(self, offset=0): + # FIXME What about tagdata? 
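+ # Layout produced below (classic TIFF IFD): a 2-byte entry count, one 12-byte entry per tag (tag id, type, count, value-or-offset), a 4-byte next-IFD pointer, then any values too large for an entry's 4-byte value field, each padded to a word boundary.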
+ result = self._pack("H", len(self._tags_v2)) + + entries = [] + offset = offset + len(result) + len(self._tags_v2) * 12 + 4 + stripoffsets = None + + # pass 1: convert tags to binary format + # always write tags in ascending order + for tag, value in sorted(self._tags_v2.items()): + if tag == STRIPOFFSETS: + stripoffsets = len(entries) + typ = self.tagtype.get(tag) + logger.debug(f"Tag {tag}, Type: {typ}, Value: {repr(value)}") + is_ifd = typ == TiffTags.LONG and isinstance(value, dict) + if is_ifd: + if self._endian == "<": + ifh = b"II\x2A\x00\x08\x00\x00\x00" + else: + ifh = b"MM\x00\x2A\x00\x00\x00\x08" + ifd = ImageFileDirectory_v2(ifh, group=tag) + values = self._tags_v2[tag] + for ifd_tag, ifd_value in values.items(): + ifd[ifd_tag] = ifd_value + data = ifd.tobytes(offset) + else: + values = value if isinstance(value, tuple) else (value,) + data = self._write_dispatch[typ](self, *values) + + tagname = TiffTags.lookup(tag, self.group).name + typname = "ifd" if is_ifd else TYPES.get(typ, "unknown") + msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})" + msg += " - value: " + ( + "" % len(data) if len(data) >= 16 else str(values) + ) + logger.debug(msg) + + # count is sum of lengths for string and arbitrary data + if is_ifd: + count = 1 + elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]: + count = len(data) + else: + count = len(values) + # figure out if data fits into the entry + if len(data) <= 4: + entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) + else: + entries.append((tag, typ, count, self._pack("L", offset), data)) + offset += (len(data) + 1) // 2 * 2 # pad to word + + # update strip offset data to point beyond auxiliary data + if stripoffsets is not None: + tag, typ, count, value, data = entries[stripoffsets] + if data: + raise NotImplementedError("multistrip support not yet implemented") + value = self._pack("L", self._unpack("L", value)[0] + offset) + entries[stripoffsets] = tag, typ, count, value, data + + # pass 2: write entries to file + for tag, typ, count, value, data in entries: + logger.debug(f"{tag} {typ} {count} {repr(value)} {repr(data)}") + result += self._pack("HHL4s", tag, typ, count, value) + + # -- overwrite here for multi-page -- + result += b"\0\0\0\0" # end of entries + + # pass 3: write auxiliary data to file + for tag, typ, count, value, data in entries: + result += data + if len(data) & 1: + result += b"\0" + + return result + + def save(self, fp): + + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + offset = fp.tell() + result = self.tobytes(offset) + fp.write(result) + return offset + len(result) + + +ImageFileDirectory_v2._load_dispatch = _load_dispatch +ImageFileDirectory_v2._write_dispatch = _write_dispatch +for idx, name in TYPES.items(): + name = name.replace(" ", "_") + setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1]) + setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx]) +del _load_dispatch, _write_dispatch, idx, name + + +# Legacy ImageFileDirectory support. +class ImageFileDirectory_v1(ImageFileDirectory_v2): + """This class represents the **legacy** interface to a TIFF tag directory. 
+ + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v1() + ifd[key] = 'Some Data' + ifd.tagtype[key] = TiffTags.ASCII + print(ifd[key]) + ('Some Data',) + + Also contains a dictionary of tag types as read from the tiff image file, + :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. + + Values are returned as a tuple. + + .. deprecated:: 3.0.0 + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._legacy_api = True + + tags = property(lambda self: self._tags_v1) + tagdata = property(lambda self: self._tagdata) + + # defined in ImageFileDirectory_v2 + tagtype: dict + """Dictionary of tag types""" + + @classmethod + def from_v2(cls, original): + """Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = (val,) + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. 
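+# A minimal reading sketch (illustrative only; it assumes nothing beyond the
+# public PIL.Image API, and "multipage.tif" is a hypothetical filename):
+#
+#     from PIL import Image
+#
+#     with Image.open("multipage.tif") as im:
+#         for frame in range(im.n_frames):  # n_frames walks the IFD chain
+#             im.seek(frame)                # _seek() loads that frame's tags
+#             im.load()                     # decode pixel data for the frame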
+ + +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + _close_exclusive_fp_after_loading = False + + def __init__(self, fp=None, filename=None): + self.tag_v2 = None + """ Image file directory (tag dictionary) """ + + self.tag = None + """ Legacy tag entries """ + + super().__init__(fp, filename) + + def _open(self): + """Open the first image in a TIFF file""" + + # Header + ifh = self.fp.read(8) + if ifh[2] == 43: + ifh += self.fp.read(8) + + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy IFD entries will be filled in later + self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self._fp = self.fp + self._frame_pos = [] + self._n_frames = None + + logger.debug("*** TiffImageFile._open ***") + logger.debug(f"- __first: {self.__first}") + logger.debug(f"- ifh: {repr(ifh)}") # Use repr to avoid str(bytes) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + self._seek(len(self._frame_pos)) + while self._n_frames is None: + self._seek(self.tell() + 1) + self.seek(current) + return self._n_frames + + def seek(self, frame): + """Select a given frame as current image""" + if not self._seek_check(frame): + return + self._seek(frame) + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. + Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self._fp + + # reset buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + logger.debug( + f"Seeking to frame {frame}, on frame {self.__frame}, " + f"__next {self.__next}, location: {self.fp.tell()}" + ) + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + logger.debug("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + if self.tag_v2.next in self._frame_pos: + # This IFD has already been processed + # Declare this to be the end of the image + self.__next = 0 + else: + self.__next = self.tag_v2.next + if self.__next == 0: + self._n_frames = frame + 1 + if len(self._frame_pos) == 1: + self.is_animated = self.__next != 0 + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + self._reload_exif() + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + """Return the current frame number""" + return self.__frame + + def get_child_images(self): + if SUBIFD not in self.tag_v2: + return [] + child_images = [] + exif = self.getexif() + offset = None + for im_offset in self.tag_v2[SUBIFD]: + # reset buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + current_offset = self._fp.tell() + if offset is None: + offset = current_offset + + fp = self._fp + ifd = exif._get_ifd_dict(im_offset) + jpegInterchangeFormat = ifd.get(513) + if jpegInterchangeFormat is not None: + fp.seek(jpegInterchangeFormat) + jpeg_data = fp.read(ifd.get(514)) + + fp = io.BytesIO(jpeg_data) + + with Image.open(fp) as im: + if jpegInterchangeFormat is None: + im._frame_pos = [im_offset] + im._seek(0) + im.load() + child_images.append(im) + + if offset is not None: + self._fp.seek(offset) + return 
child_images + + def getxmp(self): + """ + Returns a dictionary containing the XMP tags. + Requires defusedxml to be installed. + + :returns: XMP tags in a dictionary. + """ + return self._getxmp(self.tag_v2[XMP]) if XMP in self.tag_v2 else {} + + def get_photoshop_blocks(self): + """ + Returns a dictionary of Photoshop "Image Resource Blocks". + The keys are the image resource ID. For more information, see + https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577409_pgfId-1037727 + + :returns: Photoshop "Image Resource Blocks" in a dictionary. + """ + blocks = {} + val = self.tag_v2.get(0x8649) + if val: + while val[:4] == b"8BIM": + id = i16(val[4:6]) + n = math.ceil((val[6] + 1) / 2) * 2 + size = i32(val[6 + n : 10 + n]) + data = val[10 + n : 10 + n + size] + blocks[id] = {"data": data} + + val = val[math.ceil((10 + n + size) / 2) * 2 :] + return blocks + + def load(self): + if self.tile and self.use_load_libtiff: + return self._load_libtiff() + return super().load() + + def load_end(self): + if self._tile_orientation: + method = { + 2: Image.Transpose.FLIP_LEFT_RIGHT, + 3: Image.Transpose.ROTATE_180, + 4: Image.Transpose.FLIP_TOP_BOTTOM, + 5: Image.Transpose.TRANSPOSE, + 6: Image.Transpose.ROTATE_270, + 7: Image.Transpose.TRANSVERSE, + 8: Image.Transpose.ROTATE_90, + }.get(self._tile_orientation) + if method is not None: + self.im = self.im.transpose(method) + self._size = self.im.size + + # allow closing if we're on the first frame, there's no next + # This is the ImageFile.load path only, libtiff specific below. + if not self.is_animated: + self._close_exclusive_fp_after_loading = True + + # reset buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + + # load IFD data from fp before it is closed + exif = self.getexif() + for key in TiffTags.TAGS_V2_GROUPS.keys(): + if key not in exif: + continue + exif.get_ifd(key) + + def _load_libtiff(self): + """Overload method triggered when we detect a compressed tiff + Calls out to libtiff""" + + Image.Image.load(self) + + self.load_prepare() + + if not len(self.tile) == 1: + raise OSError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = list(self.tile[0][3]) + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + # libtiff closes the file descriptor, so pass in a dup. + try: + fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except OSError: + # io.BytesIO have a fileno, but returns an OSError if + # it doesn't use a file descriptor. + fp = False + + if fp: + args[2] = fp + + decoder = Image._getdecoder( + self.mode, "libtiff", tuple(args), self.decoderconfig + ) + try: + decoder.setimage(self.im, extents) + except ValueError as e: + raise OSError("Couldn't set the image") from e + + close_self_fp = self._exclusive_fp and not self.is_animated + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. 
+ # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an OSError if there's no underlying fp. Easier to + # deal with here by reordering. + logger.debug("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif fp: + # we've got a actual file on disk, pass in the fp. + logger.debug("have fileno, calling fileno version of the decoder.") + if not close_self_fp: + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + logger.debug("don't have fileno or getvalue. just reading") + self.fp.seek(0) + # UNDONE -- so much for that buffer size thing. + n, err = decoder.decode(self.fp.read()) + + if fp: + try: + os.close(fp) + except OSError: + pass + + self.tile = [] + self.readonly = 0 + + self.load_end() + + # libtiff closed the fp in a, we need to close self.fp, if possible + if close_self_fp: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise OSError(err) + + return Image.Image.load(self) + + def _setup(self): + """Setup this image object based on current tags""" + + if 0xBC01 in self.tag_v2: + raise OSError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + # old style jpeg compression images most certainly are YCbCr + if self._compression == "tiff_jpeg": + photo = 6 + + fillorder = self.tag_v2.get(FILLORDER, 1) + + logger.debug("*** Summary ***") + logger.debug(f"- compression: {self._compression}") + logger.debug(f"- photometric_interpretation: {photo}") + logger.debug(f"- planar_configuration: {self._planar_configuration}") + logger.debug(f"- fill_order: {fillorder}") + logger.debug(f"- YCbCr subsampling: {self.tag.get(YCBCRSUBSAMPLING)}") + + # size + xsize = int(self.tag_v2.get(IMAGEWIDTH)) + ysize = int(self.tag_v2.get(IMAGELENGTH)) + self._size = xsize, ysize + + logger.debug(f"- size: {self.size}") + + sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1: + # SAMPLEFORMAT is properly per band, so an RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + sample_format = (1,) + + bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,)) + extra_tuple = self.tag_v2.get(EXTRASAMPLES, ()) + if photo in (2, 6, 8): # RGB, YCbCr, LAB + bps_count = 3 + elif photo == 5: # CMYK + bps_count = 4 + else: + bps_count = 1 + bps_count += len(extra_tuple) + bps_actual_count = len(bps_tuple) + samples_per_pixel = self.tag_v2.get( + SAMPLESPERPIXEL, + 3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1, + ) + + if samples_per_pixel > MAX_SAMPLESPERPIXEL: + # DOS check, samples_per_pixel can be a Long, and we extend the tuple below + logger.error( + "More samples per pixel than can be decoded: %s", samples_per_pixel + ) + raise SyntaxError("Invalid value for samples per pixel") + + if samples_per_pixel < bps_actual_count: + # If a file has more values in bps_tuple than expected, + # remove the excess. 
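+ # (some files pad BITSPERSAMPLE beyond SAMPLESPERPIXEL; trimming keeps the OPEN_INFO key lookup below well-formed)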
+ bps_tuple = bps_tuple[:samples_per_pixel] + elif samples_per_pixel > bps_actual_count and bps_actual_count == 1: + # If a file has only one value in bps_tuple, when it should have more, + # presume it is the same number of bits for all of the samples. + bps_tuple = bps_tuple * samples_per_pixel + + if len(bps_tuple) != samples_per_pixel: + raise SyntaxError("unknown data organization") + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, + photo, + sample_format, + fillorder, + bps_tuple, + extra_tuple, + ) + logger.debug(f"format key: {key}") + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError as e: + logger.debug("- unsupported format") + raise SyntaxError("unknown pixel mode") from e + + logger.debug(f"- raw mode: {rawmode}") + logger.debug(f"- pil mode: {self.mode}") + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION, 1) + yres = self.tag_v2.get(Y_RESOLUTION, 1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT) + if resunit == 2: # dots per inch + self.info["dpi"] = (xres, yres) + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = (xres * 2.54, yres * 2.54) + elif resunit is None: # used to default to 1, but now 2) + self.info["dpi"] = (xres, yres) + # For backward compatibility, + # we also preserve the old behavior + self.info["resolution"] = xres, yres + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = layer = 0 + self.tile = [] + self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw" + if self.use_load_libtiff: + # Decoder expects entire file as one tile. + # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # use the _load_libtiff function. + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + # Replace fillorder with fillorder=1 + key = key[:3] + (1,) + key[4:] + logger.debug(f"format key: {key}") + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. 
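+ # e.g. an "I;16B" rawmode from a big-endian (MM) file: libtiff has already byte-swapped the data to native order, hence the ";16N" substitutions below.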
+            if rawmode == "I;16":
+                rawmode = "I;16N"
+            if ";16B" in rawmode:
+                rawmode = rawmode.replace(";16B", ";16N")
+            if ";16L" in rawmode:
+                rawmode = rawmode.replace(";16L", ";16N")
+
+            # YCbCr images with new jpeg compression with pixels in one plane
+            # unpacked straight into RGB values
+            if (
+                photo == 6
+                and self._compression == "jpeg"
+                and self._planar_configuration == 1
+            ):
+                rawmode = "RGB"
+
+            # Offset in the tile tuple is 0, we go from 0,0 to
+            # w,h, and we only do this once -- eds
+            a = (rawmode, self._compression, False, self.tag_v2.offset)
+            self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a))
+
+        elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
+            # striped image
+            if STRIPOFFSETS in self.tag_v2:
+                offsets = self.tag_v2[STRIPOFFSETS]
+                h = self.tag_v2.get(ROWSPERSTRIP, ysize)
+                w = self.size[0]
+            else:
+                # tiled image
+                offsets = self.tag_v2[TILEOFFSETS]
+                w = self.tag_v2.get(TILEWIDTH)
+                h = self.tag_v2.get(TILELENGTH)
+
+            for offset in offsets:
+                if x + w > xsize:
+                    stride = w * sum(bps_tuple) / 8  # bytes per line
+                else:
+                    stride = 0
+
+                tile_rawmode = rawmode
+                if self._planar_configuration == 2:
+                    # each band on its own layer
+                    tile_rawmode = rawmode[layer]
+                    # adjust stride width accordingly
+                    stride /= bps_count
+
+                a = (tile_rawmode, int(stride), 1)
+                self.tile.append(
+                    (
+                        self._compression,
+                        (x, y, min(x + w, xsize), min(y + h, ysize)),
+                        offset,
+                        a,
+                    )
+                )
+                x = x + w
+                if x >= self.size[0]:
+                    x, y = 0, y + h
+                    if y >= self.size[1]:
+                        x = y = 0
+                        layer += 1
+        else:
+            logger.debug("- unsupported data organization")
+            raise SyntaxError("unknown data organization")
+
+        # Fix up info.
+        if ICCPROFILE in self.tag_v2:
+            self.info["icc_profile"] = self.tag_v2[ICCPROFILE]
+
+        # fixup palette descriptor
+
+        if self.mode in ["P", "PA"]:
+            palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
+            self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
+
+        self._tile_orientation = self.tag_v2.get(0x0112)
+
+
+#
+# --------------------------------------------------------------------
+# Write TIFF files
+
+# little endian is default except for image modes with
+# explicit big endian byte-order
+
+SAVE_INFO = {
+    # mode => rawmode, byteorder, photometrics,
+    #           sampleformat, bitspersample, extra
+    "1": ("1", II, 1, 1, (1,), None),
+    "L": ("L", II, 1, 1, (8,), None),
+    "LA": ("LA", II, 1, 1, (8, 8), 2),
+    "P": ("P", II, 3, 1, (8,), None),
+    "PA": ("PA", II, 3, 1, (8, 8), 2),
+    "I": ("I;32S", II, 1, 2, (32,), None),
+    "I;16": ("I;16", II, 1, 1, (16,), None),
+    "I;16S": ("I;16S", II, 1, 2, (16,), None),
+    "F": ("F;32F", II, 1, 3, (32,), None),
+    "RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
+    "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
+    "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
+    "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
+    "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
+    "LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
+    "I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
+    "I;16B": ("I;16B", MM, 1, 1, (16,), None),
+    "I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
+    "F;32BF": ("F;32BF", MM, 1, 3, (32,), None),
+}
+
+
+def _save(im, fp, filename):
+
+    try:
+        rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
+    except KeyError as e:
+        raise OSError(f"cannot write mode {im.mode} as TIFF") from e
+
+    ifd = ImageFileDirectory_v2(prefix=prefix)
+
+    encoderinfo = im.encoderinfo
+    encoderconfig = im.encoderconfig
+    try:
+        compression = encoderinfo["compression"]
+    except KeyError:
+        compression = im.info.get("compression")
+    if isinstance(compression,
int): + # compression value may be from BMP. Ignore it + compression = None + if compression is None: + compression = "raw" + elif compression == "tiff_jpeg": + # OJPEG is obsolete, so use new-style JPEG compression instead + compression = "jpeg" + elif compression == "tiff_deflate": + compression = "tiff_adobe_deflate" + + libtiff = WRITE_LIBTIFF or compression != "raw" + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = 1 + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + if "tiffinfo" in encoderinfo: + info = encoderinfo["tiffinfo"] + elif "exif" in encoderinfo: + info = encoderinfo["exif"] + if isinstance(info, bytes): + exif = Image.Exif() + exif.load(info) + info = exif + else: + info = {} + logger.debug("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS.keys(): + ifd[key] = info.get_ifd(key) + else: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except Exception: + pass # might not be an IFD. Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, "tag_v2"): + # preserve tags from original TIFF image file + for key in ( + RESOLUTION_UNIT, + X_RESOLUTION, + Y_RESOLUTION, + IPTC_NAA_CHUNK, + PHOTOSHOP_CHUNK, + XMP, + ): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype[key] + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + icc = encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + ifd[ICCPROFILE] = icc + + for key, name in [ + (IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright"), + ]: + if name in encoderinfo: + ifd[key] = encoderinfo[name] + + dpi = encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + if PHOTOMETRIC_INTERPRETATION not in ifd: + ifd[PHOTOMETRIC_INTERPRETATION] = photo + elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0: + if im.mode == "1": + inverted_im = im.copy() + px = inverted_im.load() + for y in range(inverted_im.height): + for x in range(inverted_im.width): + px[x, y] = 0 if px[x, y] == 255 else 255 + im = inverted_im + else: + im = ImageOps.invert(im) + + if im.mode in ["P", "PA"]: + lut = im.im.getpalette("RGB", "RGB;L") + colormap = [] + colors = len(lut) // 3 + for i in range(3): + colormap += [v * 256 for v in lut[colors * i : colors * (i + 1)]] + colormap += [0] * (256 - colors) + ifd[COLORMAP] = colormap + # data orientation + stride = len(bits) * ((im.size[0] * bits[0] + 7) // 8) + # aim for given strip size (64 KB by default) when using libtiff writer + if libtiff: + im_strip_size = encoderinfo.get("strip_size", STRIP_SIZE) + rows_per_strip = 1 if stride == 0 else min(im_strip_size // 
stride, im.size[1]) + # JPEG encoder expects multiple of 8 rows + if compression == "jpeg": + rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, im.size[1]) + else: + rows_per_strip = im.size[1] + if rows_per_strip == 0: + rows_per_strip = 1 + strip_byte_counts = 1 if stride == 0 else stride * rows_per_strip + strips_per_image = (im.size[1] + rows_per_strip - 1) // rows_per_strip + ifd[ROWSPERSTRIP] = rows_per_strip + if strip_byte_counts >= 2**16: + ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG + ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + ( + stride * im.size[1] - strip_byte_counts * (strips_per_image - 1), + ) + ifd[STRIPOFFSETS] = tuple( + range(0, strip_byte_counts * strips_per_image, strip_byte_counts) + ) # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if im.mode == "YCbCr": + for tag, value in { + YCBCRSUBSAMPLING: (1, 1), + REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255), + }.items(): + ifd.setdefault(tag, value) + + blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS] + if libtiff: + if "quality" in encoderinfo: + quality = encoderinfo["quality"] + if not isinstance(quality, int) or quality < 0 or quality > 100: + raise ValueError("Invalid quality setting") + if compression != "jpeg": + raise ValueError( + "quality setting only supported for 'jpeg' compression" + ) + ifd[JPEGQUALITY] = quality + + logger.debug("Saving using libtiff encoder") + logger.debug("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # optional types for non core tags + types = {} + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + # The other tags expect arrays with a certain length (fixed or depending on + # BITSPERSAMPLE, etc), passing arrays with a different length will result in + # segfaults. Block these tags until we add extra validation. + # SUBIFD may also cause a segfault. + blocklist += [ + REFERENCEBLACKWHITE, + STRIPBYTECOUNTS, + STRIPOFFSETS, + TRANSFERFUNCTION, + SUBIFD, + ] + + # bits per sample is a single short in the tiff directory, not a list. + atts = {BITSPERSAMPLE: bits[0]} + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + legacy_ifd = {} + if hasattr(im, "tag"): + legacy_ifd = im.tag.to_v2() + + # SAMPLEFORMAT is determined by the image format and should not be copied + # from legacy_ifd. + supplied_tags = {**getattr(im, "tag_v2", {}), **legacy_ifd} + if SAMPLEFORMAT in supplied_tags: + del supplied_tags[SAMPLEFORMAT] + + for tag, value in itertools.chain(ifd.items(), supplied_tags.items()): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. + # Custom items are supported for int, float, unicode, string and byte + # values. Other types and tuples require a tagtype. 
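+        # (For example, a str value is encoded as NUL-terminated ASCII below,
+        # while a tuple value is dropped unless a tagtype was set for the tag.)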
+ if tag not in TiffTags.LIBTIFF_CORE: + if not Image.core.libtiff_support_custom_tags: + continue + + if tag in ifd.tagtype: + types[tag] = ifd.tagtype[tag] + elif not (isinstance(value, (int, float, str, bytes))): + continue + else: + type = TiffTags.lookup(tag).type + if type: + types[tag] = type + if tag not in atts and tag not in blocklist: + if isinstance(value, str): + atts[tag] = value.encode("ascii", "replace") + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1: + atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0] + + logger.debug("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if im.mode in ("I;16B", "I;16"): + rawmode = "I;16N" + + # Pass tags as sorted list so that the tags are set in a fixed order. + # This is required by libtiff for some tags. For example, the JPEGQUALITY + # pseudo tag requires that the COMPRESS tag was already set. + tags = list(atts.items()) + tags.sort() + a = (rawmode, compression, _fp, filename, tags, types) + e = Image._getencoder(im.mode, "libtiff", a, encoderconfig) + e.setimage(im.im, (0, 0) + im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16 * 1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise OSError(f"encoder error {s} when writing image file") + + else: + for tag in blocklist: + del ifd[tag] + offset = ifd.save(fp) + + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))] + ) + + # -- helper for multi-page save -- + if "_debug_multipage" in encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + + +class AppendingTiffWriter: + fieldSizes = [ + 0, # None + 1, # byte + 1, # ascii + 2, # short + 4, # long + 8, # rational + 1, # sbyte + 1, # undefined + 2, # sshort + 4, # slong + 8, # srational + 4, # float + 8, # double + ] + + # StripOffsets = 273 + # FreeOffsets = 288 + # TileOffsets = 324 + # JPEGQTables = 519 + # JPEGDCTables = 520 + # JPEGACTables = 521 + Tags = {273, 288, 324, 519, 520, 521} + + def __init__(self, fn, new=False): + if hasattr(fn, "read"): + self.f = fn + self.close_fp = False + else: + self.name = fn + self.close_fp = True + try: + self.f = open(fn, "w+b" if new else "r+b") + except OSError: + self.f = open(fn, "w+b") + self.beginning = self.f.tell() + self.setup() + + def setup(self): + # Reset everything. + self.f.seek(self.beginning, os.SEEK_SET) + + self.whereToWriteNewIFDOffset = None + self.offsetOfNewPage = 0 + + self.IIMM = iimm = self.f.read(4) + if not iimm: + # empty file - first page + self.isFirst = True + return + + self.isFirst = False + if iimm == b"II\x2a\x00": + self.setEndian("<") + elif iimm == b"MM\x00\x2a": + self.setEndian(">") + else: + raise RuntimeError("Invalid TIFF file header") + + self.skipIFDs() + self.goToEnd() + + def finalize(self): + if self.isFirst: + return + + # fix offsets + self.f.seek(self.offsetOfNewPage) + + iimm = self.f.read(4) + if not iimm: + # raise RuntimeError("nothing written into new page") + # Make it easy to finish a frame without committing to a new one. 
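+            # (setup() ran, but nothing was written for this page, so there
+            # is nothing to fix up.)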
+ return + + if iimm != self.IIMM: + raise RuntimeError("IIMM of new page doesn't match IIMM of first page") + + ifd_offset = self.readLong() + ifd_offset += self.offsetOfNewPage + self.f.seek(self.whereToWriteNewIFDOffset) + self.writeLong(ifd_offset) + self.f.seek(ifd_offset) + self.fixIFD() + + def newFrame(self): + # Call this to finish a frame. + self.finalize() + self.setup() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + if self.close_fp: + self.close() + return False + + def tell(self): + return self.f.tell() - self.offsetOfNewPage + + def seek(self, offset, whence=io.SEEK_SET): + if whence == os.SEEK_SET: + offset += self.offsetOfNewPage + + self.f.seek(offset, whence) + return self.tell() + + def goToEnd(self): + self.f.seek(0, os.SEEK_END) + pos = self.f.tell() + + # pad to 16 byte boundary + pad_bytes = 16 - pos % 16 + if 0 < pad_bytes < 16: + self.f.write(bytes(pad_bytes)) + self.offsetOfNewPage = self.f.tell() + + def setEndian(self, endian): + self.endian = endian + self.longFmt = self.endian + "L" + self.shortFmt = self.endian + "H" + self.tagFormat = self.endian + "HHL" + + def skipIFDs(self): + while True: + ifd_offset = self.readLong() + if ifd_offset == 0: + self.whereToWriteNewIFDOffset = self.f.tell() - 4 + break + + self.f.seek(ifd_offset) + num_tags = self.readShort() + self.f.seek(num_tags * 12, os.SEEK_CUR) + + def write(self, data): + return self.f.write(data) + + def readShort(self): + (value,) = struct.unpack(self.shortFmt, self.f.read(2)) + return value + + def readLong(self): + (value,) = struct.unpack(self.longFmt, self.f.read(4)) + return value + + def rewriteLastShortToLong(self, value): + self.f.seek(-2, os.SEEK_CUR) + bytes_written = self.f.write(struct.pack(self.longFmt, value)) + if bytes_written is not None and bytes_written != 4: + raise RuntimeError(f"wrote only {bytes_written} bytes but wanted 4") + + def rewriteLastShort(self, value): + self.f.seek(-2, os.SEEK_CUR) + bytes_written = self.f.write(struct.pack(self.shortFmt, value)) + if bytes_written is not None and bytes_written != 2: + raise RuntimeError(f"wrote only {bytes_written} bytes but wanted 2") + + def rewriteLastLong(self, value): + self.f.seek(-4, os.SEEK_CUR) + bytes_written = self.f.write(struct.pack(self.longFmt, value)) + if bytes_written is not None and bytes_written != 4: + raise RuntimeError(f"wrote only {bytes_written} bytes but wanted 4") + + def writeShort(self, value): + bytes_written = self.f.write(struct.pack(self.shortFmt, value)) + if bytes_written is not None and bytes_written != 2: + raise RuntimeError(f"wrote only {bytes_written} bytes but wanted 2") + + def writeLong(self, value): + bytes_written = self.f.write(struct.pack(self.longFmt, value)) + if bytes_written is not None and bytes_written != 4: + raise RuntimeError(f"wrote only {bytes_written} bytes but wanted 4") + + def close(self): + self.finalize() + self.f.close() + + def fixIFD(self): + num_tags = self.readShort() + + for i in range(num_tags): + tag, field_type, count = struct.unpack(self.tagFormat, self.f.read(8)) + + field_size = self.fieldSizes[field_type] + total_size = field_size * count + is_local = total_size <= 4 + if not is_local: + offset = self.readLong() + offset += self.offsetOfNewPage + self.rewriteLastLong(offset) + + if tag in self.Tags: + cur_pos = self.f.tell() + + if is_local: + self.fixOffsets( + count, isShort=(field_size == 2), isLong=(field_size == 4) + ) + self.f.seek(cur_pos + 4) + else: + self.f.seek(offset) + self.fixOffsets( + 
count, isShort=(field_size == 2), isLong=(field_size == 4) + ) + self.f.seek(cur_pos) + + offset = cur_pos = None + + elif is_local: + # skip the locally stored value that is not an offset + self.f.seek(4, os.SEEK_CUR) + + def fixOffsets(self, count, isShort=False, isLong=False): + if not isShort and not isLong: + raise RuntimeError("offset is neither short nor long") + + for i in range(count): + offset = self.readShort() if isShort else self.readLong() + offset += self.offsetOfNewPage + if isShort and offset >= 65536: + # offset is now too large - we must convert shorts to longs + if count != 1: + raise RuntimeError("not implemented") # XXX TODO + + # simple case - the offset is just one and therefore it is + # local (not referenced with another offset) + self.rewriteLastShortToLong(offset) + self.f.seek(-10, os.SEEK_CUR) + self.writeShort(TiffTags.LONG) # rewrite the type to LONG + self.f.seek(8, os.SEEK_CUR) + elif isShort: + self.rewriteLastShort(offset) + else: + self.rewriteLastLong(offset) + + +def _save_all(im, fp, filename): + encoderinfo = im.encoderinfo.copy() + encoderconfig = im.encoderconfig + append_images = list(encoderinfo.get("append_images", [])) + if not hasattr(im, "n_frames") and not append_images: + return _save(im, fp, filename) + + cur_idx = im.tell() + try: + with AppendingTiffWriter(fp) as tf: + for ims in [im] + append_images: + ims.encoderinfo = encoderinfo + ims.encoderconfig = encoderconfig + if not hasattr(ims, "n_frames"): + nfr = 1 + else: + nfr = ims.n_frames + + for idx in range(nfr): + ims.seek(idx) + ims.load() + _save(ims, tf, filename) + tf.newFrame() + finally: + im.seek(cur_idx) + + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) +Image.register_save_all(TiffImageFile.format, _save_all) + +Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"]) + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/sbsheriff/Lib/site-packages/PIL/TiffTags.py b/sbsheriff/Lib/site-packages/PIL/TiffTags.py new file mode 100644 index 0000000..9b52771 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/TiffTags.py @@ -0,0 +1,559 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. +## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None): + return super().__new__(cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + # Using get will call hash(value), which can be expensive + # for some types (e.g. Fraction). Since self.enum is rarely + # used, it's usually better to test it first. + return self.enum.get(value, value) if self.enum else value + + +def lookup(tag, group=None): + """ + :param tag: Integer tag number + :param group: Which :py:data:`~PIL.TiffTags.TAGS_V2_GROUPS` to look in + + .. versionadded:: 8.3.0 + + :returns: Taginfo namedtuple, From the ``TAGS_V2`` info if possible, + otherwise just populating the value and name from ``TAGS``. 
+ If the tag is not recognized, "unknown" is returned for the name + + """ + + if group is not None: + info = TAGS_V2_GROUPS[group].get(tag) if group in TAGS_V2_GROUPS else None + else: + info = TAGS_V2.get(tag) + return info or TagInfo(tag, TAGS.get(tag, "unknown")) + + +## +# Map tag numbers to tag info. +# +# id: (Name, Type, Length, enum_values) +# +# The length here differs from the length in the tiff spec. For +# numbers, the tiff spec is for the number of fields returned. We +# agree here. For string-like types, the tiff spec uses the length of +# field in bytes. In Pillow, we are using the number of expected +# fields, in general 1 for string-like types. + + +BYTE = 1 +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 +SIGNED_BYTE = 6 +UNDEFINED = 7 +SIGNED_SHORT = 8 +SIGNED_LONG = 9 +SIGNED_RATIONAL = 10 +FLOAT = 11 +DOUBLE = 12 +IFD = 13 +LONG8 = 16 + +TAGS_V2 = { + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ( + "Compression", + SHORT, + 1, + { + "Uncompressed": 1, + "CCITT 1d": 2, + "Group 3 Fax": 3, + "Group 4 Fax": 4, + "LZW": 5, + "JPEG": 6, + "PackBits": 32773, + }, + ), + 262: ( + "PhotometricInterpretation", + SHORT, + 1, + { + "WhiteIsZero": 0, + "BlackIsZero": 1, + "RGB": 2, + "RGB Palette": 3, + "Transparency Mask": 4, + "CMYK": 5, + "YCbCr": 6, + "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892, # Adobe DNG + }, + ), + 263: ("Threshholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellLength", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + 280: ("MinSampleValue", SHORT, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), + 297: ("PageNumber", SHORT, 2), + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticities", RATIONAL, 6), + 320: ("ColorMap", SHORT, 0), + 321: ("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + 330: ("SubIFDs", LONG, 0), + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + 340: ("SMinSampleValue", DOUBLE, 0), + 341: ("SMaxSampleValue", DOUBLE, 0), + 342: ("TransferRange", SHORT, 6), + 347: ("JPEGTables", UNDEFINED, 1), + # obsolete JPEG tags + 512: 
("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", RATIONAL, 6), + 700: ("XMP", BYTE, 0), + 33432: ("Copyright", ASCII, 1), + 33723: ("IptcNaaInfo", UNDEFINED, 1), + 34377: ("PhotoshopInfo", BYTE, 0), + # FIXME add more tags here + 34665: ("ExifIFD", LONG, 1), + 34675: ("ICCProfile", UNDEFINED, 1), + 34853: ("GPSInfoIFD", LONG, 1), + 36864: ("ExifVersion", UNDEFINED, 1), + 40965: ("InteroperabilityIFD", LONG, 1), + 41730: ("CFAPattern", UNDEFINED, 1), + # MPInfo + 45056: ("MPFVersion", UNDEFINED, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", UNDEFINED, 1), + 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check + 45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: ("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1), + 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1), + 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1), + 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1), + 45579: ("YawAngle", SIGNED_RATIONAL, 1), + 45580: ("PitchAngle", SIGNED_RATIONAL, 1), + 45581: ("RollAngle", SIGNED_RATIONAL, 1), + 40960: ("FlashPixVersion", UNDEFINED, 1), + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one + 50839: ("ImageJMetaData", UNDEFINED, 1), # see Issue #2006 +} +TAGS_V2_GROUPS = { + # ExifIFD + 34665: { + 36864: ("ExifVersion", UNDEFINED, 1), + 40960: ("FlashPixVersion", UNDEFINED, 1), + 40965: ("InteroperabilityIFD", LONG, 1), + 41730: ("CFAPattern", UNDEFINED, 1), + }, + # GPSInfoIFD + 34853: { + 0: ("GPSVersionID", BYTE, 4), + 1: ("GPSLatitudeRef", ASCII, 2), + 2: ("GPSLatitude", RATIONAL, 3), + 3: ("GPSLongitudeRef", ASCII, 2), + 4: ("GPSLongitude", RATIONAL, 3), + 5: ("GPSAltitudeRef", BYTE, 1), + 6: ("GPSAltitude", RATIONAL, 1), + 7: ("GPSTimeStamp", RATIONAL, 3), + 8: ("GPSSatellites", ASCII, 0), + 9: ("GPSStatus", ASCII, 2), + 10: ("GPSMeasureMode", ASCII, 2), + 11: ("GPSDOP", RATIONAL, 1), + 12: ("GPSSpeedRef", ASCII, 2), + 13: ("GPSSpeed", RATIONAL, 1), + 14: ("GPSTrackRef", ASCII, 2), + 15: ("GPSTrack", RATIONAL, 1), + 16: ("GPSImgDirectionRef", ASCII, 2), + 17: ("GPSImgDirection", RATIONAL, 1), + 18: ("GPSMapDatum", ASCII, 0), + 19: ("GPSDestLatitudeRef", ASCII, 2), + 20: ("GPSDestLatitude", RATIONAL, 3), + 21: ("GPSDestLongitudeRef", ASCII, 2), + 22: ("GPSDestLongitude", RATIONAL, 3), + 23: ("GPSDestBearingRef", ASCII, 2), + 24: ("GPSDestBearing", RATIONAL, 1), + 25: ("GPSDestDistanceRef", ASCII, 2), + 26: ("GPSDestDistance", RATIONAL, 1), + 27: ("GPSProcessingMethod", UNDEFINED, 0), + 28: ("GPSAreaInformation", UNDEFINED, 0), + 29: ("GPSDateStamp", ASCII, 11), + 30: ("GPSDifferential", SHORT, 1), + }, + # InteroperabilityIFD + 40965: {1: ("InteropIndex", ASCII, 1), 2: ("InteropVersion", UNDEFINED, 1)}, +} + +# Legacy Tags structure +# these 
tags aren't included above, but were in the previous versions +TAGS = { + 347: "JPEGTables", + 700: "XMP", + # Additional Exif Info + 32932: "Wang Annotation", + 33434: "ExposureTime", + 33437: "FNumber", + 33445: "MD FileTag", + 33446: "MD ScalePixel", + 33447: "MD ColorTable", + 33448: "MD LabName", + 33449: "MD SampleInfo", + 33450: "MD PrepDate", + 33451: "MD PrepTime", + 33452: "MD FileUnits", + 33550: "ModelPixelScaleTag", + 33723: "IptcNaaInfo", + 33918: "INGR Packet Data Tag", + 33919: "INGR Flag Registers", + 33920: "IrasB Transformation Matrix", + 33922: "ModelTiepointTag", + 34264: "ModelTransformationTag", + 34377: "PhotoshopInfo", + 34735: "GeoKeyDirectoryTag", + 34736: "GeoDoubleParamsTag", + 34737: "GeoAsciiParamsTag", + 34850: "ExposureProgram", + 34852: "SpectralSensitivity", + 34855: "ISOSpeedRatings", + 34856: "OECF", + 34864: "SensitivityType", + 34865: "StandardOutputSensitivity", + 34866: "RecommendedExposureIndex", + 34867: "ISOSpeed", + 34868: "ISOSpeedLatitudeyyy", + 34869: "ISOSpeedLatitudezzz", + 34908: "HylaFAX FaxRecvParams", + 34909: "HylaFAX FaxSubAddress", + 34910: "HylaFAX FaxRecvTime", + 36864: "ExifVersion", + 36867: "DateTimeOriginal", + 36868: "DateTImeDigitized", + 37121: "ComponentsConfiguration", + 37122: "CompressedBitsPerPixel", + 37724: "ImageSourceData", + 37377: "ShutterSpeedValue", + 37378: "ApertureValue", + 37379: "BrightnessValue", + 37380: "ExposureBiasValue", + 37381: "MaxApertureValue", + 37382: "SubjectDistance", + 37383: "MeteringMode", + 37384: "LightSource", + 37385: "Flash", + 37386: "FocalLength", + 37396: "SubjectArea", + 37500: "MakerNote", + 37510: "UserComment", + 37520: "SubSec", + 37521: "SubSecTimeOriginal", + 37522: "SubsecTimeDigitized", + 40960: "FlashPixVersion", + 40961: "ColorSpace", + 40962: "PixelXDimension", + 40963: "PixelYDimension", + 40964: "RelatedSoundFile", + 40965: "InteroperabilityIFD", + 41483: "FlashEnergy", + 41484: "SpatialFrequencyResponse", + 41486: "FocalPlaneXResolution", + 41487: "FocalPlaneYResolution", + 41488: "FocalPlaneResolutionUnit", + 41492: "SubjectLocation", + 41493: "ExposureIndex", + 41495: "SensingMethod", + 41728: "FileSource", + 41729: "SceneType", + 41730: "CFAPattern", + 41985: "CustomRendered", + 41986: "ExposureMode", + 41987: "WhiteBalance", + 41988: "DigitalZoomRatio", + 41989: "FocalLengthIn35mmFilm", + 41990: "SceneCaptureType", + 41991: "GainControl", + 41992: "Contrast", + 41993: "Saturation", + 41994: "Sharpness", + 41995: "DeviceSettingDescription", + 41996: "SubjectDistanceRange", + 42016: "ImageUniqueID", + 42032: "CameraOwnerName", + 42033: "BodySerialNumber", + 42034: "LensSpecification", + 42035: "LensMake", + 42036: "LensModel", + 42037: "LensSerialNumber", + 42112: "GDAL_METADATA", + 42113: "GDAL_NODATA", + 42240: "Gamma", + 50215: "Oce Scanjob Description", + 50216: "Oce Application Selector", + 50217: "Oce Identification Number", + 50218: "Oce ImageLogic Characteristics", + # Adobe DNG + 50706: "DNGVersion", + 50707: "DNGBackwardVersion", + 50708: "UniqueCameraModel", + 50709: "LocalizedCameraModel", + 50710: "CFAPlaneColor", + 50711: "CFALayout", + 50712: "LinearizationTable", + 50713: "BlackLevelRepeatDim", + 50714: "BlackLevel", + 50715: "BlackLevelDeltaH", + 50716: "BlackLevelDeltaV", + 50717: "WhiteLevel", + 50718: "DefaultScale", + 50719: "DefaultCropOrigin", + 50720: "DefaultCropSize", + 50721: "ColorMatrix1", + 50722: "ColorMatrix2", + 50723: "CameraCalibration1", + 50724: "CameraCalibration2", + 50725: "ReductionMatrix1", + 50726: "ReductionMatrix2", + 
50727: "AnalogBalance", + 50728: "AsShotNeutral", + 50729: "AsShotWhiteXY", + 50730: "BaselineExposure", + 50731: "BaselineNoise", + 50732: "BaselineSharpness", + 50733: "BayerGreenSplit", + 50734: "LinearResponseLimit", + 50735: "CameraSerialNumber", + 50736: "LensInfo", + 50737: "ChromaBlurRadius", + 50738: "AntiAliasStrength", + 50740: "DNGPrivateData", + 50778: "CalibrationIlluminant1", + 50779: "CalibrationIlluminant2", + 50784: "Alias Layer Metadata", +} + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + + for group, tags in TAGS_V2_GROUPS.items(): + for k, v in tags.items(): + tags[k] = TagInfo(k, *v) + + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. +# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# Following pseudo-tags are also handled by default in libtiff: +# TIFFTAG_JPEGQUALITY 65537 + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +# This list also exists in encode.c +LIBTIFF_CORE = { + 255, + 256, + 257, + 258, + 259, + 262, + 263, + 266, + 274, + 277, + 278, + 280, + 281, + 340, + 341, + 282, + 283, + 284, + 286, + 287, + 296, + 297, + 321, + 320, + 338, + 32995, + 322, + 323, + 32998, + 32996, + 339, + 32997, + 330, + 531, + 530, + 301, + 532, + 333, + # as above + 269, # this has been in our tests forever, and works + 65537, +} + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for writing tiled images with libtiff +LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when 
added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. diff --git a/sbsheriff/Lib/site-packages/PIL/WalImageFile.py b/sbsheriff/Lib/site-packages/PIL/WalImageFile.py new file mode 100644 index 0000000..0dc695a --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/WalImageFile.py @@ -0,0 +1,124 @@ +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +""" +This reader is based on the specification available from: +https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +and has been tested with a few sample files found using google. + +.. note:: + This format cannot be automatically recognized, so the reader + is not registered for use with :py:func:`PIL.Image.open()`. + To open a WAL file, use the :py:func:`PIL.WalImageFile.open()` function instead. +""" + +from . import Image, ImageFile +from ._binary import i32le as i32 + + +class WalImageFile(ImageFile.ImageFile): + + format = "WAL" + format_description = "Quake2 Texture" + + def _open(self): + self.mode = "P" + + # read header fields + header = self.fp.read(32 + 24 + 32 + 12) + self._size = i32(header, 32), i32(header, 36) + Image._decompression_bomb_check(self.size) + + # load pixel data + offset = i32(header, 40) + self.fp.seek(offset) + + # strings are null-terminated + self.info["name"] = header[:32].split(b"\0", 1)[0] + next_name = header[56 : 56 + 32].split(b"\0", 1)[0] + if next_name: + self.info["next_name"] = next_name + + def load(self): + if not self.im: + self.im = Image.core.new(self.mode, self.size) + self.frombytes(self.fp.read(self.size[0] * self.size[1])) + self.putpalette(quake2palette) + return Image.Image.load(self) + + +def open(filename): + """ + Load texture from a Quake2 WAL texture file. + + By default, a Quake2 standard palette is attached to the texture. + To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method. + + :param filename: WAL file name, or an opened file handle. + :returns: An image instance. 
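+
+    A minimal usage sketch (``"texture.wal"`` is a placeholder path)::
+
+        from PIL import WalImageFile
+
+        im = WalImageFile.open("texture.wal")
+        im.save("texture.png")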
+ """ + return WalImageFile(filename) + + +quake2palette = ( + # default palette taken from piffo 0.93 by Hans Häggström + b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" + b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" + b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" + b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" + b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" + b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" + b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" + b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" + b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" + b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" + b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" + b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" + b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" + b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" + b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" + b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" + b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" + b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" + b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" + b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" + b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" + b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" + b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" + b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" + b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" + b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" + b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" + b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" + b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" + b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" + b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" + b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" + b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" + b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" + b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" + b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/sbsheriff/Lib/site-packages/PIL/WebPImagePlugin.py 
b/sbsheriff/Lib/site-packages/PIL/WebPImagePlugin.py new file mode 100644 index 0000000..5eaeb10 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/WebPImagePlugin.py @@ -0,0 +1,354 @@ +from io import BytesIO + +from . import Image, ImageFile + +try: + from . import _webp + + SUPPORTED = True +except ImportError: + SUPPORTED = False + + +_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True} + +_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True} + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless +} + + +def _accept(prefix): + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + if is_riff_file_format and is_webp_file and is_valid_vp8_mode: + if not SUPPORTED: + return ( + "image file could not be identified because WEBP support not installed" + ) + return True + + +class WebPImageFile(ImageFile.ImageFile): + + format = "WEBP" + format_description = "WebP image" + __loaded = 0 + __logical_frame = 0 + + def _open(self): + if not _webp.HAVE_WEBPANIM: + # Legacy mode + data, width, height, self.mode, icc_profile, exif = _webp.WebPDecode( + self.fp.read() + ) + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + self._size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + self.n_frames = 1 + self.is_animated = False + return + + # Use the newer AnimDecoder API to parse the (possibly) animated file, + # and access muxed chunks like ICC/EXIF/XMP. + self._decoder = _webp.WebPAnimDecoder(self.fp.read()) + + # Get info from decoder + width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info() + self._size = width, height + self.info["loop"] = loop_count + bg_a, bg_r, bg_g, bg_b = ( + (bgcolor >> 24) & 0xFF, + (bgcolor >> 16) & 0xFF, + (bgcolor >> 8) & 0xFF, + bgcolor & 0xFF, + ) + self.info["background"] = (bg_r, bg_g, bg_b, bg_a) + self.n_frames = frame_count + self.is_animated = self.n_frames > 1 + self.mode = "RGB" if mode == "RGBX" else mode + self.rawmode = mode + self.tile = [] + + # Attempt to read ICC / EXIF / XMP chunks from file + icc_profile = self._decoder.get_chunk("ICCP") + exif = self._decoder.get_chunk("EXIF") + xmp = self._decoder.get_chunk("XMP ") + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + if xmp: + self.info["xmp"] = xmp + + # Initialize seek state + self._reset(reset=False) + + def _getexif(self): + if "exif" not in self.info: + return None + return self.getexif()._get_merged_dict() + + def seek(self, frame): + if not self._seek_check(frame): + return + + # Set logical frame to requested position + self.__logical_frame = frame + + def _reset(self, reset=True): + if reset: + self._decoder.reset() + self.__physical_frame = 0 + self.__loaded = -1 + self.__timestamp = 0 + + def _get_next(self): + # Get next frame + ret = self._decoder.get_next() + self.__physical_frame += 1 + + # Check if an error occurred + if ret is None: + self._reset() # Reset just to be safe + self.seek(0) + raise EOFError("failed to decode next frame in WebP file") + + # Compute duration + data, timestamp = ret + duration = timestamp - self.__timestamp + self.__timestamp = timestamp + + # libwebp gives frame end, adjust to start of frame + timestamp -= duration + return data, timestamp, duration + + def _seek(self, frame): + if self.__physical_frame == frame: + return # Nothing to do 
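+        # The libwebp decoder only steps forward; to go backwards we reset
+        # it and advance frame by frame from the start.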
+ if frame < self.__physical_frame: + self._reset() # Rewind to beginning + while self.__physical_frame < frame: + self._get_next() # Advance to the requested frame + + def load(self): + if _webp.HAVE_WEBPANIM: + if self.__loaded != self.__logical_frame: + self._seek(self.__logical_frame) + + # We need to load the image data for this frame + data, timestamp, duration = self._get_next() + self.info["timestamp"] = timestamp + self.info["duration"] = duration + self.__loaded = self.__logical_frame + + # Set tile + if self.fp and self._exclusive_fp: + self.fp.close() + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)] + + return super().load() + + def tell(self): + if not _webp.HAVE_WEBPANIM: + return super().tell() + + return self.__logical_frame + + +def _save_all(im, fp, filename): + encoderinfo = im.encoderinfo.copy() + append_images = list(encoderinfo.get("append_images", [])) + + # If total frame count is 1, then save using the legacy API, which + # will preserve non-alpha modes + total = 0 + for ims in [im] + append_images: + total += getattr(ims, "n_frames", 1) + if total == 1: + _save(im, fp, filename) + return + + background = (0, 0, 0, 0) + if "background" in encoderinfo: + background = encoderinfo["background"] + elif "background" in im.info: + background = im.info["background"] + if isinstance(background, int): + # GifImagePlugin stores a global color table index in + # info["background"]. So it must be converted to an RGBA value + palette = im.getpalette() + if palette: + r, g, b = palette[background * 3 : (background + 1) * 3] + background = (r, g, b, 255) + else: + background = (background, background, background, 255) + + duration = im.encoderinfo.get("duration", im.info.get("duration", 0)) + loop = im.encoderinfo.get("loop", 0) + minimize_size = im.encoderinfo.get("minimize_size", False) + kmin = im.encoderinfo.get("kmin", None) + kmax = im.encoderinfo.get("kmax", None) + allow_mixed = im.encoderinfo.get("allow_mixed", False) + verbose = False + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + method = im.encoderinfo.get("method", 0) + icc_profile = im.encoderinfo.get("icc_profile") or "" + exif = im.encoderinfo.get("exif", "") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + xmp = im.encoderinfo.get("xmp", "") + if allow_mixed: + lossless = False + + # Sensible keyframe defaults are from gif2webp.c script + if kmin is None: + kmin = 9 if lossless else 3 + if kmax is None: + kmax = 17 if lossless else 5 + + # Validate background color + if ( + not isinstance(background, (list, tuple)) + or len(background) != 4 + or not all(0 <= v < 256 for v in background) + ): + raise OSError( + f"Background color is not an RGBA tuple clamped to (0-255): {background}" + ) + + # Convert to packed uint + bg_r, bg_g, bg_b, bg_a = background + background = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | (bg_b << 0) + + # Setup the WebP animation encoder + enc = _webp.WebPAnimEncoder( + im.size[0], + im.size[1], + background, + loop, + minimize_size, + kmin, + kmax, + allow_mixed, + verbose, + ) + + # Add each frame + frame_idx = 0 + timestamp = 0 + cur_idx = im.tell() + try: + for ims in [im] + append_images: + # Get # of frames in this image + nfr = getattr(ims, "n_frames", 1) + + for idx in range(nfr): + ims.seek(idx) + ims.load() + + # Make sure image mode is supported + frame = ims + rawmode = ims.mode + if ims.mode not in _VALID_WEBP_MODES: + alpha = ( + "A" in ims.mode + or "a" in ims.mode + or 
(ims.mode == "P" and "A" in ims.im.getpalettemode()) + ) + rawmode = "RGBA" if alpha else "RGB" + frame = ims.convert(rawmode) + + if rawmode == "RGB": + # For faster conversion, use RGBX + rawmode = "RGBX" + + # Append the frame to the animation encoder + enc.add( + frame.tobytes("raw", rawmode), + timestamp, + frame.size[0], + frame.size[1], + rawmode, + lossless, + quality, + method, + ) + + # Update timestamp and frame index + if isinstance(duration, (list, tuple)): + timestamp += duration[frame_idx] + else: + timestamp += duration + frame_idx += 1 + + finally: + im.seek(cur_idx) + + # Force encoder to flush frames + enc.add(None, timestamp, 0, 0, "", lossless, quality, 0) + + # Get the final output from the encoder + data = enc.assemble(icc_profile, exif, xmp) + if data is None: + raise OSError("cannot write file as WebP (encoder returned None)") + + fp.write(data) + + +def _save(im, fp, filename): + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + icc_profile = im.encoderinfo.get("icc_profile") or "" + exif = im.encoderinfo.get("exif", b"") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + if exif.startswith(b"Exif\x00\x00"): + exif = exif[6:] + xmp = im.encoderinfo.get("xmp", "") + method = im.encoderinfo.get("method", 4) + + if im.mode not in _VALID_WEBP_LEGACY_MODES: + alpha = ( + "A" in im.mode + or "a" in im.mode + or (im.mode == "P" and "transparency" in im.info) + ) + im = im.convert("RGBA" if alpha else "RGB") + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + float(quality), + im.mode, + icc_profile, + method, + exif, + xmp, + ) + if data is None: + raise OSError("cannot write file as WebP (encoder returned None)") + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +if SUPPORTED: + Image.register_save(WebPImageFile.format, _save) + if _webp.HAVE_WEBPANIM: + Image.register_save_all(WebPImageFile.format, _save_all) + Image.register_extension(WebPImageFile.format, ".webp") + Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/sbsheriff/Lib/site-packages/PIL/WmfImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/WmfImagePlugin.py new file mode 100644 index 0000000..2f54cde --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/WmfImagePlugin.py @@ -0,0 +1,177 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +# WMF/EMF reference documentation: +# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf +# http://wvware.sourceforge.net/caolan/index.html +# http://wvware.sourceforge.net/caolan/ora-wmf.html + +from . import Image, ImageFile +from ._binary import i16le as word +from ._binary import si16le as short +from ._binary import si32le as _long + +_handler = None + + +def register_handler(handler): + """ + Install application-specific WMF image handler. + + :param handler: Handler object. 
+ """ + global _handler + _handler = handler + + +if hasattr(Image.core, "drawwmf"): + # install default handler (windows only) + + class WmfHandler: + def open(self, im): + im.mode = "RGB" + self.bbox = im.info["wmf_bbox"] + + def load(self, im): + im.fp.seek(0) # rewind + return Image.frombytes( + "RGB", + im.size, + Image.core.drawwmf(im.fp.read(), im.size, self.bbox), + "raw", + "BGR", + (im.size[0] * 3 + 3) & -4, + -1, + ) + + register_handler(WmfHandler()) + +# +# -------------------------------------------------------------------- +# Read WMF file + + +def _accept(prefix): + return ( + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00" + ) + + +## +# Image plugin for Windows metafiles. + + +class WmfStubImageFile(ImageFile.StubImageFile): + + format = "WMF" + format_description = "Windows Metafile" + + def _open(self): + self._inch = None + + # check placable header + s = self.fp.read(80) + + if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": + + # placeable windows metafile + + # get units per inch + self._inch = word(s, 14) + + # get bounding box + x0 = short(s, 6) + y0 = short(s, 8) + x1 = short(s, 10) + y1 = short(s, 12) + + # normalize size to 72 dots per inch + self.info["dpi"] = 72 + size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + # sanity check (standard metafile header) + if s[22:26] != b"\x01\x00\t\x00": + raise SyntaxError("Unsupported WMF file format") + + elif s[:4] == b"\x01\x00\x00\x00" and s[40:44] == b" EMF": + # enhanced metafile + + # get bounding box + x0 = _long(s, 8) + y0 = _long(s, 12) + x1 = _long(s, 16) + y1 = _long(s, 20) + + # get frame (in 0.01 millimeter units) + frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36) + + size = x1 - x0, y1 - y0 + + # calculate dots per inch from bbox and frame + xdpi = 2540.0 * (x1 - y0) / (frame[2] - frame[0]) + ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1]) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + if xdpi == ydpi: + self.info["dpi"] = xdpi + else: + self.info["dpi"] = xdpi, ydpi + + else: + raise SyntaxError("Unsupported file format") + + self.mode = "RGB" + self._size = size + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + def load(self, dpi=None): + if dpi is not None and self._inch is not None: + self.info["dpi"] = dpi + x0, y0, x1, y1 = self.info["wmf_bbox"] + self._size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) + return super().load() + + +def _save(im, fp, filename): + if _handler is None or not hasattr(_handler, "save"): + raise OSError("WMF save handler not installed") + _handler.save(im, fp, filename) + + +# +# -------------------------------------------------------------------- +# Registry stuff + + +Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) +Image.register_save(WmfStubImageFile.format, _save) + +Image.register_extensions(WmfStubImageFile.format, [".wmf", ".emf"]) diff --git a/sbsheriff/Lib/site-packages/PIL/XVThumbImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/XVThumbImagePlugin.py new file mode 100644 index 0000000..4efedb7 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/XVThumbImagePlugin.py @@ -0,0 +1,78 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XV Thumbnail file handler by Charles E. 
"Gene" Cash +# (gcash@magicnet.net) +# +# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, +# available from ftp://ftp.cis.upenn.edu/pub/xv/ +# +# history: +# 98-08-15 cec created (b/w only) +# 98-12-09 cec added color palette +# 98-12-28 fl added to PIL (with only a few very minor modifications) +# +# To do: +# FIXME: make save work (this requires quantization support) +# + +from . import Image, ImageFile, ImagePalette +from ._binary import o8 + +_MAGIC = b"P7 332" + +# standard color palette for thumbnails (RGB332) +PALETTE = b"" +for r in range(8): + for g in range(8): + for b in range(4): + PALETTE = PALETTE + ( + o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3) + ) + + +def _accept(prefix): + return prefix[:6] == _MAGIC + + +## +# Image plugin for XV thumbnail images. + + +class XVThumbImageFile(ImageFile.ImageFile): + + format = "XVThumb" + format_description = "XV thumbnail image" + + def _open(self): + + # check magic + if not _accept(self.fp.read(6)): + raise SyntaxError("not an XV thumbnail file") + + # Skip to beginning of next line + self.fp.readline() + + # skip info comments + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("Unexpected EOF reading XV thumbnail file") + if s[0] != 35: # ie. when not a comment: '#' + break + + # parse header line (already read) + s = s.strip().split() + + self.mode = "P" + self._size = int(s[0]), int(s[1]) + + self.palette = ImagePalette.raw("RGB", PALETTE) + + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))] + + +# -------------------------------------------------------------------- + +Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept) diff --git a/sbsheriff/Lib/site-packages/PIL/XbmImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/XbmImagePlugin.py new file mode 100644 index 0000000..59acabe --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/XbmImagePlugin.py @@ -0,0 +1,95 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XBM File handling +# +# History: +# 1995-09-08 fl Created +# 1996-11-01 fl Added save support +# 1997-07-07 fl Made header parser more tolerant +# 1997-07-22 fl Fixed yet another parser bug +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) +# 2004-02-24 fl Allow some whitespace before first #define +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re + +from . import Image, ImageFile + +# XBM header +xbm_head = re.compile( + rb"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+" + b"(?P" + b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+" + b")?" + rb"[\000-\377]*_bits\[]" +) + + +def _accept(prefix): + return prefix.lstrip()[:7] == b"#define" + + +## +# Image plugin for X11 bitmaps. 
+
+
+class XbmImageFile(ImageFile.ImageFile):
+
+    format = "XBM"
+    format_description = "X11 Bitmap"
+
+    def _open(self):
+
+        m = xbm_head.match(self.fp.read(512))
+
+        if not m:
+            raise SyntaxError("not an XBM file")
+
+        xsize = int(m.group("width"))
+        ysize = int(m.group("height"))
+
+        if m.group("hotspot"):
+            self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot")))
+
+        self.mode = "1"
+        self._size = xsize, ysize
+
+        self.tile = [("xbm", (0, 0) + self.size, m.end(), None)]
+
+
+def _save(im, fp, filename):
+
+    if im.mode != "1":
+        raise OSError(f"cannot write mode {im.mode} as XBM")
+
+    fp.write(f"#define im_width {im.size[0]}\n".encode("ascii"))
+    fp.write(f"#define im_height {im.size[1]}\n".encode("ascii"))
+
+    hotspot = im.encoderinfo.get("hotspot")
+    if hotspot:
+        fp.write(f"#define im_x_hot {hotspot[0]}\n".encode("ascii"))
+        fp.write(f"#define im_y_hot {hotspot[1]}\n".encode("ascii"))
+
+    fp.write(b"static char im_bits[] = {\n")
+
+    ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)])
+
+    fp.write(b"};\n")
+
+
+Image.register_open(XbmImageFile.format, XbmImageFile, _accept)
+Image.register_save(XbmImageFile.format, _save)
+
+Image.register_extension(XbmImageFile.format, ".xbm")
+
+Image.register_mime(XbmImageFile.format, "image/xbm")
diff --git a/sbsheriff/Lib/site-packages/PIL/XpmImagePlugin.py b/sbsheriff/Lib/site-packages/PIL/XpmImagePlugin.py
new file mode 100644
index 0000000..aaed203
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/PIL/XpmImagePlugin.py
@@ -0,0 +1,130 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XPM File handling
+#
+# History:
+# 1996-12-29 fl Created
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7)
+#
+# Copyright (c) Secret Labs AB 1997-2001.
+# Copyright (c) Fredrik Lundh 1996-2001.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+import re
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import o8
+
+# XPM header
+xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)')
+
+
+def _accept(prefix):
+    return prefix[:9] == b"/* XPM */"
+
+
+##
+# Image plugin for X11 pixel maps.
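Before the XPM plugin class that follows, here is a quick round-trip through the XBM plugin just defined, using only public Pillow calls; the size and hotspot values are arbitrary, chosen for illustration:

    from io import BytesIO

    from PIL import Image

    im = Image.new("1", (16, 8))                # XBM is 1-bit only; _save rejects other modes
    buf = BytesIO()
    im.save(buf, format="XBM", hotspot=(3, 4))  # extra keyword args land in im.encoderinfo
    buf.seek(0)

    back = Image.open(buf)
    assert back.format == "XBM"
    assert back.size == (16, 8)
    assert back.info["hotspot"] == (3, 4)

The hotspot survives the round-trip because _save writes the `_x_hot`/`_y_hot` defines and _open's `hotspot` regex group parses them back into `info`.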
+ + +class XpmImageFile(ImageFile.ImageFile): + + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self): + + if not _accept(self.fp.read(9)): + raise SyntaxError("not an XPM file") + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("broken XPM file") + m = xpm_head.match(s) + if m: + break + + self._size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + raise ValueError("cannot read this XPM file") + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for _ in range(pal): + + s = self.fp.readline() + if s[-2:] == b"\r\n": + s = s[:-2] + elif s[-1:] in b"\r\n": + s = s[:-1] + + c = s[1] + s = s[2:-2].split() + + for i in range(0, len(s), 2): + + if s[i] == b"c": + + # process colour key + rgb = s[i + 1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = ( + o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255) + ) + else: + # unknown colour + raise ValueError("cannot read this XPM file") + break + + else: + + # missing colour key + raise ValueError("cannot read this XPM file") + + self.mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, bytes): + + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [None] * ysize + + for i in range(ysize): + s[i] = self.fp.readline()[1 : xsize + 1].ljust(xsize) + + return b"".join(s) + + +# +# Registry + + +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/sbsheriff/Lib/site-packages/PIL/__init__.py b/sbsheriff/Lib/site-packages/PIL/__init__.py new file mode 100644 index 0000000..e65b155 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/__init__.py @@ -0,0 +1,80 @@ +"""Pillow (Fork of the Python Imaging Library) + +Pillow is the friendly PIL fork by Alex Clark and Contributors. + https://github.com/python-pillow/Pillow/ + +Pillow is forked from PIL 1.1.7. + +PIL is the Python Imaging Library by Fredrik Lundh and Contributors. +Copyright (c) 1999 by Secret Labs AB. + +Use PIL.__version__ for this Pillow version. + +;-) +""" + +from . import _version + +# VERSION was removed in Pillow 6.0.0. +# PILLOW_VERSION was removed in Pillow 9.0.0. +# Use __version__ instead. 
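Looking back at XpmImagePlugin above for a moment: the reader's palette handling is dense, and a tiny hand-written XPM makes it concrete. A standalone sketch (the image bytes are invented; they are built line by line because the parser expects each data line to start with a double quote at column 0):

    from io import BytesIO

    from PIL import Image

    XPM = (
        b"/* XPM */\n"
        b"static char *demo[] = {\n"
        b'"4 2 2 1",\n'      # 4x2 pixels, 2 palette entries, 1 char per pixel
        b'". c #FF0000",\n'  # "." -> red
        b'"  c None",\n'     # " " -> transparent
        b'"....",\n'
        b'"..  ",\n'
        b"};\n"
    )

    im = Image.open(BytesIO(XPM))
    assert im.format == "XPM"
    assert im.mode == "P" and im.size == (4, 2)
    assert im.info["transparency"] == ord(" ")  # the "None" colour key
    assert im.getpixel((0, 0)) == ord(".")      # pixel values are the palette characters' codes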
+__version__ = _version.__version__ +del _version + + +_plugins = [ + "BlpImagePlugin", + "BmpImagePlugin", + "BufrStubImagePlugin", + "CurImagePlugin", + "DcxImagePlugin", + "DdsImagePlugin", + "EpsImagePlugin", + "FitsImagePlugin", + "FitsStubImagePlugin", + "FliImagePlugin", + "FpxImagePlugin", + "FtexImagePlugin", + "GbrImagePlugin", + "GifImagePlugin", + "GribStubImagePlugin", + "Hdf5StubImagePlugin", + "IcnsImagePlugin", + "IcoImagePlugin", + "ImImagePlugin", + "ImtImagePlugin", + "IptcImagePlugin", + "JpegImagePlugin", + "Jpeg2KImagePlugin", + "McIdasImagePlugin", + "MicImagePlugin", + "MpegImagePlugin", + "MpoImagePlugin", + "MspImagePlugin", + "PalmImagePlugin", + "PcdImagePlugin", + "PcxImagePlugin", + "PdfImagePlugin", + "PixarImagePlugin", + "PngImagePlugin", + "PpmImagePlugin", + "PsdImagePlugin", + "SgiImagePlugin", + "SpiderImagePlugin", + "SunImagePlugin", + "TgaImagePlugin", + "TiffImagePlugin", + "WebPImagePlugin", + "WmfImagePlugin", + "XbmImagePlugin", + "XpmImagePlugin", + "XVThumbImagePlugin", +] + + +class UnidentifiedImageError(OSError): + """ + Raised in :py:meth:`PIL.Image.open` if an image cannot be opened and identified. + """ + + pass diff --git a/sbsheriff/Lib/site-packages/PIL/__main__.py b/sbsheriff/Lib/site-packages/PIL/__main__.py new file mode 100644 index 0000000..a05323f --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/__main__.py @@ -0,0 +1,3 @@ +from .features import pilinfo + +pilinfo() diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/BdfFontFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/BdfFontFile.cpython-310.pyc new file mode 100644 index 0000000..04670dc Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/BdfFontFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..6cede29 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..17c6059 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..4f9c51e Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ContainerIO.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ContainerIO.cpython-310.pyc new file mode 100644 index 0000000..9df5ccb Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ContainerIO.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/CurImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/CurImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..fa1e7d0 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/CurImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-310.pyc new file mode 
100644 index 0000000..e3eac43 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..66a270a Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..f473e35 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-310.pyc new file mode 100644 index 0000000..e3afc27 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/FitsImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/FitsImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..0869279 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/FitsImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/FitsStubImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/FitsStubImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..080b7b7 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/FitsStubImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/FliImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/FliImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..483a0aa Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/FliImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/FontFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/FontFile.cpython-310.pyc new file mode 100644 index 0000000..715dd11 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/FontFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..7414bd1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..a52ef40 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..950bae9 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/GdImageFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/GdImageFile.cpython-310.pyc new file mode 100644 index 0000000..81d925f Binary files /dev/null 
and b/sbsheriff/Lib/site-packages/PIL/__pycache__/GdImageFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/GifImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/GifImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..d0cf895 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/GifImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/GimpGradientFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/GimpGradientFile.cpython-310.pyc new file mode 100644 index 0000000..475a350 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/GimpGradientFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-310.pyc new file mode 100644 index 0000000..8016ef5 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..c32f2f1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..8af54d6 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..e112cd6 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..47e6fe1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..a4c0b82 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/Image.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/Image.cpython-310.pyc new file mode 100644 index 0000000..4091d1e Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/Image.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageChops.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageChops.cpython-310.pyc new file mode 100644 index 0000000..303b80d Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageChops.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageCms.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageCms.cpython-310.pyc new file mode 100644 index 0000000..7457d08 Binary files /dev/null and 
b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageCms.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageColor.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageColor.cpython-310.pyc new file mode 100644 index 0000000..2b1d5af Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageColor.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageDraw.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageDraw.cpython-310.pyc new file mode 100644 index 0000000..a58b69f Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageDraw.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageDraw2.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageDraw2.cpython-310.pyc new file mode 100644 index 0000000..2de7d80 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageDraw2.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageEnhance.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageEnhance.cpython-310.pyc new file mode 100644 index 0000000..60b4c83 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageEnhance.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFile.cpython-310.pyc new file mode 100644 index 0000000..d3b8d56 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFilter.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFilter.cpython-310.pyc new file mode 100644 index 0000000..78f88cd Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFilter.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFont.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFont.cpython-310.pyc new file mode 100644 index 0000000..0d53b98 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageFont.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageGrab.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageGrab.cpython-310.pyc new file mode 100644 index 0000000..7a7304d Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageGrab.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMath.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMath.cpython-310.pyc new file mode 100644 index 0000000..3ab9d30 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMath.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMode.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMode.cpython-310.pyc new file mode 100644 index 0000000..dd63836 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMode.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMorph.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMorph.cpython-310.pyc new file mode 100644 index 0000000..4e79a3a Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageMorph.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageOps.cpython-310.pyc 
b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageOps.cpython-310.pyc new file mode 100644 index 0000000..4671def Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageOps.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImagePalette.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImagePalette.cpython-310.pyc new file mode 100644 index 0000000..5dda457 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImagePalette.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImagePath.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImagePath.cpython-310.pyc new file mode 100644 index 0000000..aed4e59 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImagePath.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageQt.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageQt.cpython-310.pyc new file mode 100644 index 0000000..5d792a0 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageQt.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageSequence.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageSequence.cpython-310.pyc new file mode 100644 index 0000000..320f338 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageSequence.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageShow.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageShow.cpython-310.pyc new file mode 100644 index 0000000..538e310 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageShow.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageStat.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageStat.cpython-310.pyc new file mode 100644 index 0000000..85ef521 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageStat.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageTk.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageTk.cpython-310.pyc new file mode 100644 index 0000000..f88dedc Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageTk.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageTransform.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageTransform.cpython-310.pyc new file mode 100644 index 0000000..cb319fa Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageTransform.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageWin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageWin.cpython-310.pyc new file mode 100644 index 0000000..c16324f Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImageWin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..8864078 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..b8dde97 Binary files /dev/null and 
b/sbsheriff/Lib/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..ad2b83c Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..362cf5a Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/JpegPresets.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/JpegPresets.cpython-310.pyc new file mode 100644 index 0000000..719b9a3 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/JpegPresets.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..fabb7af Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/MicImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/MicImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..5937edb Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/MicImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..db63f56 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..f9cc94f Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/MspImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/MspImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..b522ca9 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/MspImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PSDraw.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PSDraw.cpython-310.pyc new file mode 100644 index 0000000..c70f4dc Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PSDraw.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PaletteFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PaletteFile.cpython-310.pyc new file mode 100644 index 0000000..4fa1b4f Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PaletteFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..2a5d760 Binary files /dev/null and 
b/sbsheriff/Lib/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..318c2f8 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PcfFontFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PcfFontFile.cpython-310.pyc new file mode 100644 index 0000000..d2cf87c Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PcfFontFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..6364654 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..530adf7 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PdfParser.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PdfParser.cpython-310.pyc new file mode 100644 index 0000000..912b5ea Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PdfParser.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..d6b1dd1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PngImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PngImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..64c3429 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PngImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..57823a3 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..37973c7 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/PyAccess.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/PyAccess.cpython-310.pyc new file mode 100644 index 0000000..3b10fc8 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/PyAccess.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..be1dbbe Binary files /dev/null and 
b/sbsheriff/Lib/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..833407c Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/SunImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/SunImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..6f90ea1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/SunImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/TarIO.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/TarIO.cpython-310.pyc new file mode 100644 index 0000000..5849d09 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/TarIO.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..b0c1c2d Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..2ba257b Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/TiffTags.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/TiffTags.cpython-310.pyc new file mode 100644 index 0000000..11912e5 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/TiffTags.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/WalImageFile.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/WalImageFile.cpython-310.pyc new file mode 100644 index 0000000..c0faefe Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/WalImageFile.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..b6af1f5 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..3c68577 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..946492e Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..3e1ad93 Binary files /dev/null and 
b/sbsheriff/Lib/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-310.pyc new file mode 100644 index 0000000..41053b3 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..d524c13 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/__main__.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 0000000..2dc3c0e Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/__main__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/_binary.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/_binary.cpython-310.pyc new file mode 100644 index 0000000..c7646c9 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/_binary.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/_deprecate.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/_deprecate.cpython-310.pyc new file mode 100644 index 0000000..bf3e2b1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/_deprecate.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/_tkinter_finder.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/_tkinter_finder.cpython-310.pyc new file mode 100644 index 0000000..8b4cc56 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/_tkinter_finder.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/_util.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/_util.cpython-310.pyc new file mode 100644 index 0000000..9774bfc Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/_util.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/_version.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000..6dd2d3a Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/_version.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/__pycache__/features.cpython-310.pyc b/sbsheriff/Lib/site-packages/PIL/__pycache__/features.cpython-310.pyc new file mode 100644 index 0000000..d1828bf Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/__pycache__/features.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/PIL/_binary.py b/sbsheriff/Lib/site-packages/PIL/_binary.py new file mode 100644 index 0000000..a74ee9e --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/_binary.py @@ -0,0 +1,102 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. 
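The byte-order helpers defined in this file are easiest to read next to concrete values. A standalone sketch (illustrative only; these five names are present in the module as shipped):

    from PIL._binary import i16le, i32be, o8, o16le, o32be

    assert i16le(b"\x01\x02") == 0x0201       # little-endian: least significant byte first
    assert i32be(b"\x00\x00\x01\x00") == 256  # big-endian: most significant byte first
    assert o8(0x1FF) == b"\xff"               # o8 masks its argument to a single byte
    assert o16le(0x0201) == b"\x01\x02"
    assert o32be(256) == b"\x00\x00\x01\x00"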
+# + + +"""Binary input/output support routines.""" + + +from struct import pack, unpack_from + + +def i8(c): + return c if c.__class__ is int else c[0] + + +def o8(i): + return bytes((i & 255,)) + + +# Input, le = little endian, be = big endian +def i16le(c, o=0): + """ + Converts a 2-bytes (16 bits) string to an unsigned integer. + + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from("h", c, o)[0] + + +def i32le(c, o=0): + """ + Converts a 4-bytes (32 bits) string to an unsigned integer. + + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from("H", c, o)[0] + + +def i32be(c, o=0): + return unpack_from(">I", c, o)[0] + + +# Output, le = little endian, be = big endian +def o16le(i): + return pack("H", i) + + +def o32be(i): + return pack(">I", i) diff --git a/sbsheriff/Lib/site-packages/PIL/_deprecate.py b/sbsheriff/Lib/site-packages/PIL/_deprecate.py new file mode 100644 index 0000000..30a8a89 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/_deprecate.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +import warnings + +from . import __version__ + + +def deprecate( + deprecated: str, + when: int | None, + replacement: str | None = None, + *, + action: str | None = None, + plural: bool = False, +) -> None: + """ + Deprecations helper. + + :param deprecated: Name of thing to be deprecated. + :param when: Pillow major version to be removed in. + :param replacement: Name of replacement. + :param action: Instead of "replacement", give a custom call to action + e.g. "Upgrade to new thing". + :param plural: if the deprecated thing is plural, needing "are" instead of "is". + + Usually of the form: + + "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). + Use [replacement] instead." + + You can leave out the replacement sentence: + + "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd)" + + Or with another call to action: + + "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). + [action]." + """ + + is_ = "are" if plural else "is" + + if when is None: + removed = "a future version" + elif when <= int(__version__.split(".")[0]): + raise RuntimeError(f"{deprecated} {is_} deprecated and should be removed.") + elif when == 10: + removed = "Pillow 10 (2023-07-01)" + else: + raise ValueError(f"Unknown removal version, update {__name__}?") + + if replacement and action: + raise ValueError("Use only one of 'replacement' and 'action'") + + if replacement: + action = f". Use {replacement} instead." + elif action: + action = f". {action.rstrip('.')}." 
+ else: + action = "" + + warnings.warn( + f"{deprecated} {is_} deprecated and will be removed in {removed}{action}", + DeprecationWarning, + stacklevel=3, + ) diff --git a/sbsheriff/Lib/site-packages/PIL/_imaging.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/PIL/_imaging.cp310-win_amd64.pyd new file mode 100644 index 0000000..e839a50 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/_imaging.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/PIL/_imagingcms.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/PIL/_imagingcms.cp310-win_amd64.pyd new file mode 100644 index 0000000..f9c1e8b Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/_imagingcms.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/PIL/_imagingft.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/PIL/_imagingft.cp310-win_amd64.pyd new file mode 100644 index 0000000..606ea55 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/_imagingft.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/PIL/_imagingmath.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/PIL/_imagingmath.cp310-win_amd64.pyd new file mode 100644 index 0000000..c483f7e Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/_imagingmath.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/PIL/_imagingmorph.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/PIL/_imagingmorph.cp310-win_amd64.pyd new file mode 100644 index 0000000..a45298d Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/_imagingmorph.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/PIL/_imagingtk.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/PIL/_imagingtk.cp310-win_amd64.pyd new file mode 100644 index 0000000..1c804b5 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/_imagingtk.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/PIL/_tkinter_finder.py b/sbsheriff/Lib/site-packages/PIL/_tkinter_finder.py new file mode 100644 index 0000000..5cd7e9b --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/_tkinter_finder.py @@ -0,0 +1,23 @@ +""" Find compiled module linking to Tcl / Tk libraries +""" +import sys +import tkinter +from tkinter import _tkinter as tk + +from ._deprecate import deprecate + +try: + if hasattr(sys, "pypy_find_executable"): + TKINTER_LIB = tk.tklib_cffi.__file__ + else: + TKINTER_LIB = tk.__file__ +except AttributeError: + # _tkinter may be compiled directly into Python, in which case __file__ is + # not available. load_tkinter_funcs will check the binary first in any case. 
+ TKINTER_LIB = None + +tk_version = str(tkinter.TkVersion) +if tk_version == "8.4": + deprecate( + "Support for Tk/Tcl 8.4", 10, action="Please upgrade to Tk/Tcl 8.5 or newer" + ) diff --git a/sbsheriff/Lib/site-packages/PIL/_util.py b/sbsheriff/Lib/site-packages/PIL/_util.py new file mode 100644 index 0000000..ba27b7e --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/_util.py @@ -0,0 +1,19 @@ +import os +from pathlib import Path + + +def is_path(f): + return isinstance(f, (bytes, str, Path)) + + +def is_directory(f): + """Checks if an object is a string, and that it points to a directory.""" + return is_path(f) and os.path.isdir(f) + + +class DeferredError: + def __init__(self, ex): + self.ex = ex + + def __getattr__(self, elt): + raise self.ex diff --git a/sbsheriff/Lib/site-packages/PIL/_version.py b/sbsheriff/Lib/site-packages/PIL/_version.py new file mode 100644 index 0000000..43896fa --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/_version.py @@ -0,0 +1,2 @@ +# Master version for Pillow +__version__ = "9.3.0" diff --git a/sbsheriff/Lib/site-packages/PIL/_webp.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/PIL/_webp.cp310-win_amd64.pyd new file mode 100644 index 0000000..898ef92 Binary files /dev/null and b/sbsheriff/Lib/site-packages/PIL/_webp.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/PIL/features.py b/sbsheriff/Lib/site-packages/PIL/features.py new file mode 100644 index 0000000..3838568 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PIL/features.py @@ -0,0 +1,320 @@ +import collections +import os +import sys +import warnings + +import PIL + +from . import Image + +modules = { + "pil": ("PIL._imaging", "PILLOW_VERSION"), + "tkinter": ("PIL._tkinter_finder", "tk_version"), + "freetype2": ("PIL._imagingft", "freetype2_version"), + "littlecms2": ("PIL._imagingcms", "littlecms_version"), + "webp": ("PIL._webp", "webpdecoder_version"), +} + + +def check_module(feature): + """ + Checks if a module is available. + + :param feature: The module to check for. + :returns: ``True`` if available, ``False`` otherwise. + :raises ValueError: If the module is not defined in this version of Pillow. + """ + if not (feature in modules): + raise ValueError(f"Unknown module {feature}") + + module, ver = modules[feature] + + try: + __import__(module) + return True + except ImportError: + return False + + +def version_module(feature): + """ + :param feature: The module to check for. + :returns: + The loaded version number as a string, or ``None`` if unknown or not available. + :raises ValueError: If the module is not defined in this version of Pillow. + """ + if not check_module(feature): + return None + + module, ver = modules[feature] + + if ver is None: + return None + + return getattr(__import__(module, fromlist=[ver]), ver) + + +def get_supported_modules(): + """ + :returns: A list of all supported modules. + """ + return [f for f in modules if check_module(f)] + + +codecs = { + "jpg": ("jpeg", "jpeglib"), + "jpg_2000": ("jpeg2k", "jp2klib"), + "zlib": ("zip", "zlib"), + "libtiff": ("libtiff", "libtiff"), +} + + +def check_codec(feature): + """ + Checks if a codec is available. + + :param feature: The codec to check for. + :returns: ``True`` if available, ``False`` otherwise. + :raises ValueError: If the codec is not defined in this version of Pillow. 
+ """ + if feature not in codecs: + raise ValueError(f"Unknown codec {feature}") + + codec, lib = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def version_codec(feature): + """ + :param feature: The codec to check for. + :returns: + The version number as a string, or ``None`` if not available. + Checked at compile time for ``jpg``, run-time otherwise. + :raises ValueError: If the codec is not defined in this version of Pillow. + """ + if not check_codec(feature): + return None + + codec, lib = codecs[feature] + + version = getattr(Image.core, lib + "_version") + + if feature == "libtiff": + return version.split("\n")[0].split("Version ")[1] + + return version + + +def get_supported_codecs(): + """ + :returns: A list of all supported codecs. + """ + return [f for f in codecs if check_codec(f)] + + +features = { + "webp_anim": ("PIL._webp", "HAVE_WEBPANIM", None), + "webp_mux": ("PIL._webp", "HAVE_WEBPMUX", None), + "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY", None), + "raqm": ("PIL._imagingft", "HAVE_RAQM", "raqm_version"), + "fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"), + "harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"), + "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"), + "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"), + "xcb": ("PIL._imaging", "HAVE_XCB", None), +} + + +def check_feature(feature): + """ + Checks if a feature is available. + + :param feature: The feature to check for. + :returns: ``True`` if available, ``False`` if unavailable, ``None`` if unknown. + :raises ValueError: If the feature is not defined in this version of Pillow. + """ + if feature not in features: + raise ValueError(f"Unknown feature {feature}") + + module, flag, ver = features[feature] + + try: + imported_module = __import__(module, fromlist=["PIL"]) + return getattr(imported_module, flag) + except ImportError: + return None + + +def version_feature(feature): + """ + :param feature: The feature to check for. + :returns: The version number as a string, or ``None`` if not available. + :raises ValueError: If the feature is not defined in this version of Pillow. + """ + if not check_feature(feature): + return None + + module, flag, ver = features[feature] + + if ver is None: + return None + + return getattr(__import__(module, fromlist=[ver]), ver) + + +def get_supported_features(): + """ + :returns: A list of all supported features. + """ + return [f for f in features if check_feature(f)] + + +def check(feature): + """ + :param feature: A module, codec, or feature name. + :returns: + ``True`` if the module, codec, or feature is available, + ``False`` or ``None`` otherwise. + """ + + if feature in modules: + return check_module(feature) + if feature in codecs: + return check_codec(feature) + if feature in features: + return check_feature(feature) + warnings.warn(f"Unknown feature '{feature}'.", stacklevel=2) + return False + + +def version(feature): + """ + :param feature: + The module, codec, or feature to check for. + :returns: + The version number as a string, or ``None`` if unknown or not available. + """ + if feature in modules: + return version_module(feature) + if feature in codecs: + return version_codec(feature) + if feature in features: + return version_feature(feature) + return None + + +def get_supported(): + """ + :returns: A list of all supported modules, features, and codecs. 
+ """ + + ret = get_supported_modules() + ret.extend(get_supported_features()) + ret.extend(get_supported_codecs()) + return ret + + +def pilinfo(out=None, supported_formats=True): + """ + Prints information about this installation of Pillow. + This function can be called with ``python3 -m PIL``. + + :param out: + The output stream to print to. Defaults to ``sys.stdout`` if ``None``. + :param supported_formats: + If ``True``, a list of all supported image file formats will be printed. + """ + + if out is None: + out = sys.stdout + + Image.init() + + print("-" * 68, file=out) + print(f"Pillow {PIL.__version__}", file=out) + py_version = sys.version.splitlines() + print(f"Python {py_version[0].strip()}", file=out) + for py_version in py_version[1:]: + print(f" {py_version.strip()}", file=out) + print("-" * 68, file=out) + print( + f"Python modules loaded from {os.path.dirname(Image.__file__)}", + file=out, + ) + print( + f"Binary modules loaded from {os.path.dirname(Image.core.__file__)}", + file=out, + ) + print("-" * 68, file=out) + + for name, feature in [ + ("pil", "PIL CORE"), + ("tkinter", "TKINTER"), + ("freetype2", "FREETYPE2"), + ("littlecms2", "LITTLECMS2"), + ("webp", "WEBP"), + ("transp_webp", "WEBP Transparency"), + ("webp_mux", "WEBPMUX"), + ("webp_anim", "WEBP Animation"), + ("jpg", "JPEG"), + ("jpg_2000", "OPENJPEG (JPEG2000)"), + ("zlib", "ZLIB (PNG/ZIP)"), + ("libtiff", "LIBTIFF"), + ("raqm", "RAQM (Bidirectional Text)"), + ("libimagequant", "LIBIMAGEQUANT (Quantization method)"), + ("xcb", "XCB (X protocol)"), + ]: + if check(name): + if name == "jpg" and check_feature("libjpeg_turbo"): + v = "libjpeg-turbo " + version_feature("libjpeg_turbo") + else: + v = version(name) + if v is not None: + version_static = name in ("pil", "jpg") + if name == "littlecms2": + # this check is also in src/_imagingcms.c:setup_module() + version_static = tuple(int(x) for x in v.split(".")) < (2, 7) + t = "compiled for" if version_static else "loaded" + if name == "raqm": + for f in ("fribidi", "harfbuzz"): + v2 = version_feature(f) + if v2 is not None: + v += f", {f} {v2}" + print("---", feature, "support ok,", t, v, file=out) + else: + print("---", feature, "support ok", file=out) + else: + print("***", feature, "support not installed", file=out) + print("-" * 68, file=out) + + if supported_formats: + extensions = collections.defaultdict(list) + for ext, i in Image.EXTENSION.items(): + extensions[i].append(ext) + + for i in sorted(Image.ID): + line = f"{i}" + if i in Image.MIME: + line = f"{line} {Image.MIME[i]}" + print(line, file=out) + + if i in extensions: + print( + "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out + ) + + features = [] + if i in Image.OPEN: + features.append("open") + if i in Image.SAVE: + features.append("save") + if i in Image.SAVE_ALL: + features.append("save_all") + if i in Image.DECODERS: + features.append("decode") + if i in Image.ENCODERS: + features.append("encode") + + print("Features: {}".format(", ".join(features)), file=out) + print("-" * 68, file=out) diff --git a/sbsheriff/Lib/site-packages/disnake-2.5.2.dist-info/INSTALLER b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/INSTALLER similarity index 100% rename from sbsheriff/Lib/site-packages/disnake-2.5.2.dist-info/INSTALLER rename to sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/INSTALLER diff --git a/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/LICENSE b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/LICENSE new file mode 100644 index 0000000..cf43d75 --- /dev/null 
+++ b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/LICENSE @@ -0,0 +1,1194 @@ +The Python Imaging Library (PIL) is + + Copyright © 1997-2011 by Secret Labs AB + Copyright © 1995-2011 by Fredrik Lundh + +Pillow is the friendly PIL fork. It is + + Copyright © 2010-2022 by Alex Clark and contributors + +Like PIL, Pillow is licensed under the open source HPND License: + +By obtaining, using, and/or copying this software and/or its associated +documentation, you agree that you have read, understood, and will comply +with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its +associated documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appears in all copies, and that +both that copyright notice and this permission notice appear in supporting +documentation, and that the name of Secret Labs AB or the author not be +used in advertising or publicity pertaining to distribution of the software +without specific, written prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + +===== brotli-1.0.9 ===== + +Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +===== freetype-2.12.1 ===== + +FREETYPE LICENSES +----------------- + +The FreeType 2 font engine is copyrighted work and cannot be used +legally without a software license. In order to make this project +usable to a vast majority of developers, we distribute it under two +mutually exclusive open-source licenses. + +This means that *you* must choose *one* of the two licenses described +below, then obey all its terms and conditions when using FreeType 2 in +any of your projects or products. + + - The FreeType License, found in the file `docs/FTL.TXT`, which is + similar to the original BSD license *with* an advertising clause + that forces you to explicitly cite the FreeType project in your + product's documentation. All details are in the license file. + This license is suited to products which don't use the GNU General + Public License. 
+
+    Note that this license is compatible to the GNU General Public
+    License version 3, but not version 2.
+
+  - The GNU General Public License version 2, found in
+    `docs/GPLv2.TXT` (any later version can be used also), for
+    programs which already use the GPL. Note that the FTL is
+    incompatible with GPLv2 due to its advertisement clause.
+
+The contributed BDF and PCF drivers come with a license similar to
+that of the X Window System. It is compatible to the above two
+licenses (see files `src/bdf/README` and `src/pcf/README`). The same
+holds for the source code files `src/base/fthash.c` and
+`include/freetype/internal/fthash.h`; they were part of the BDF driver
+in earlier FreeType versions.
+
+The gzip module uses the zlib license (see `src/gzip/zlib.h`) which
+too is compatible to the above two licenses.
+
+The MD5 checksum support (only used for debugging in development
+builds) is in the public domain.
+
+
+--- end of LICENSE.TXT ---
+                    The FreeType Project LICENSE
+                    ----------------------------
+
+                            2006-Jan-27
+
+                    Copyright 1996-2002, 2006 by
+          David Turner, Robert Wilhelm, and Werner Lemberg
+
+
+
+Introduction
+============
+
+  The FreeType Project is distributed in several archive packages;
+  some of them may contain, in addition to the FreeType font engine,
+  various tools and contributions which rely on, or relate to, the
+  FreeType Project.
+
+  This license applies to all files found in such packages, and
+  which do not fall under their own explicit license. The license
+  affects thus the FreeType font engine, the test programs,
+  documentation and makefiles, at the very least.
+
+  This license was inspired by the BSD, Artistic, and IJG
+  (Independent JPEG Group) licenses, which all encourage inclusion
+  and use of free software in commercial and freeware products
+  alike. As a consequence, its main points are that:
+
+    o We don't promise that this software works. However, we will be
+      interested in any kind of bug reports. (`as is' distribution)
+
+    o You can use this software for whatever you want, in parts or
+      full form, without having to pay us. (`royalty-free' usage)
+
+    o You may not pretend that you wrote this software. If you use
+      it, or only parts of it, in a program, you must acknowledge
+      somewhere in your documentation that you have used the
+      FreeType code. (`credits')
+
+  We specifically permit and encourage the inclusion of this
+  software, with or without modifications, in commercial products.
+  We disclaim all warranties covering The FreeType Project and
+  assume no liability related to The FreeType Project.
+
+
+  Finally, many people asked us for a preferred form for a
+  credit/disclaimer to use in compliance with this license. We thus
+  encourage you to use the following text:
+
+  """
+    Portions of this software are copyright © <year> The FreeType
+    Project (www.freetype.org). All rights reserved.
+  """
+
+  Please replace <year> with the value from the FreeType version you
+  actually use.
+
+
+Legal Terms
+===========
+
+0. Definitions
+--------------
+
+  Throughout this license, the terms `package', `FreeType Project',
+  and `FreeType archive' refer to the set of files originally
+  distributed by the authors (David Turner, Robert Wilhelm, and
+  Werner Lemberg) as the `FreeType Project', be they named as alpha,
+  beta or final release.
+
+  `You' refers to the licensee, or person using the project, where
+  `using' is a generic term including compiling the project's source
+  code as well as linking it to form a `program' or `executable'.
+ This program is referred to as `a program using the FreeType + engine'. + + This license applies to all files distributed in the original + FreeType Project, including all source code, binaries and + documentation, unless otherwise stated in the file in its + original, unmodified form as distributed in the original archive. + If you are unsure whether or not a particular file is covered by + this license, you must contact us to verify this. + + The FreeType Project is copyright (C) 1996-2000 by David Turner, + Robert Wilhelm, and Werner Lemberg. All rights reserved except as + specified below. + +1. No Warranty +-------------- + + THE FREETYPE PROJECT IS PROVIDED `AS IS' WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. IN NO EVENT WILL ANY OF THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY DAMAGES CAUSED BY THE USE OR THE INABILITY TO + USE, OF THE FREETYPE PROJECT. + +2. Redistribution +----------------- + + This license grants a worldwide, royalty-free, perpetual and + irrevocable right and license to use, execute, perform, compile, + display, copy, create derivative works of, distribute and + sublicense the FreeType Project (in both source and object code + forms) and derivative works thereof for any purpose; and to + authorize others to exercise some or all of the rights granted + herein, subject to the following conditions: + + o Redistribution of source code must retain this license file + (`FTL.TXT') unaltered; any additions, deletions or changes to + the original files must be clearly indicated in accompanying + documentation. The copyright notices of the unaltered, + original files must be preserved in all copies of source + files. + + o Redistribution in binary form must provide a disclaimer that + states that the software is based in part of the work of the + FreeType Team, in the distribution documentation. We also + encourage you to put an URL to the FreeType web page in your + documentation, though this isn't mandatory. + + These conditions apply to any software derived from or based on + the FreeType Project, not just the unmodified files. If you use + our work, you must acknowledge us. However, no fee need be paid + to us. + +3. Advertising +-------------- + + Neither the FreeType authors and contributors nor you shall use + the name of the other for commercial, advertising, or promotional + purposes without specific prior written permission. + + We suggest, but do not require, that you use one or more of the + following phrases to refer to this software in your documentation + or advertising materials: `FreeType Project', `FreeType Engine', + `FreeType library', or `FreeType Distribution'. + + As you have not signed this license, you are not required to + accept it. However, as the FreeType Project is copyrighted + material, only this license, or another one contracted with the + authors, grants you the right to use, distribute, and modify it. + Therefore, by using, distributing, or modifying the FreeType + Project, you indicate that you understand and accept all the terms + of this license. + +4. Contacts +----------- + + There are two mailing lists related to FreeType: + + o freetype@nongnu.org + + Discusses general use and applications of FreeType, as well as + future and wanted additions to the library and distribution. + If you are looking for support, start in this list if you + haven't found anything to help you in the documentation. 
+ + o freetype-devel@nongnu.org + + Discusses bugs, as well as engine internals, design issues, + specific licenses, porting, etc. + + Our home page can be found at + + https://www.freetype.org + + +--- end of FTL.TXT --- + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. + 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Library General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. 
The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. 
+ +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. 
Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. 
If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. 
+ +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + <signature of Ty Coon>, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Library General +Public License instead of this License. + +===== harfbuzz-5.3.1 ===== + +HarfBuzz is licensed under the so-called "Old MIT" license. Details follow. +For parts of HarfBuzz that are licensed under different licenses see individual +files names COPYING in subdirectories where applicable. + +Copyright © 2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020 Google, Inc. +Copyright © 2018,2019,2020 Ebrahim Byagowi +Copyright © 2019,2020 Facebook, Inc. +Copyright © 2012 Mozilla Foundation +Copyright © 2011 Codethink Limited +Copyright © 2008,2010 Nokia Corporation and/or its subsidiary(-ies) +Copyright © 2009 Keith Stribley +Copyright © 2009 Martin Hosken and SIL International +Copyright © 2007 Chris Wilson +Copyright © 2005,2006,2020,2021 Behdad Esfahbod +Copyright © 2005 David Turner +Copyright © 2004,2007,2008,2009,2010 Red Hat, Inc. +Copyright © 1998-2004 David Turner and Werner Lemberg + +For full copyright notices consult the individual files in the package. + + +Permission is hereby granted, without written agreement and without +license or royalty fees, to use, copy, modify, and distribute this +software and its documentation for any purpose, provided that the +above copyright notice and the following two paragraphs appear in +all copies of this software. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR +DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES +ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN +IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + +THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS +ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO +PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + +===== lcms2-2.13.1 ===== + +Little CMS +Copyright (c) 1998-2020 Marti Maria Saguer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject +to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +===== libjpeg-turbo-2.1.4 ===== + +LEGAL ISSUES +============ + +In plain English: + +1. We don't promise that this software works. (But if you find any bugs, + please let us know!) +2. You can use this software for whatever you want. You don't have to pay us. +3. You may not pretend that you wrote this software. If you use it in a + program, you must acknowledge somewhere in your documentation that + you've used the IJG code. + +In legalese: + +The authors make NO WARRANTY or representation, either express or implied, +with respect to this software, its quality, accuracy, merchantability, or +fitness for a particular purpose. This software is provided "AS IS", and you, +its user, assume the entire risk as to its quality and accuracy. + +This software is copyright (C) 1991-2020, Thomas G. Lane, Guido Vollbeding. +All Rights Reserved except as specified below. + +Permission is hereby granted to use, copy, modify, and distribute this +software (or portions thereof) for any purpose, without fee, subject to these +conditions: +(1) If any part of the source code for this software is distributed, then this +README file must be included, with this copyright and no-warranty notice +unaltered; and any additions, deletions, or changes to the original files +must be clearly indicated in accompanying documentation. +(2) If only executable code is distributed, then the accompanying +documentation must state that "this software is based in part on the work of +the Independent JPEG Group". +(3) Permission for use of this software is granted only if the user accepts +full responsibility for any undesirable consequences; the authors accept +NO LIABILITY for damages of any kind. + +These conditions apply to any software derived from or based on the IJG code, +not just to the unmodified library. If you use our work, you ought to +acknowledge us. + +Permission is NOT granted for the use of any IJG author's name or company name +in advertising or publicity relating to this software or products derived from +it. This software may be referred to only as "the Independent JPEG Group's +software". + +We specifically permit and encourage the use of this software as the basis of +commercial products, provided that all warranty or liability claims are +assumed by the product vendor. + +libjpeg-turbo Licenses +====================== + +libjpeg-turbo is covered by three compatible BSD-style open source licenses: + +- The IJG (Independent JPEG Group) License, which is listed in + [README.ijg](README.ijg) + + This license applies to the libjpeg API library and associated programs + (any code inherited from libjpeg, and any modifications to that code.) + +- The Modified (3-clause) BSD License, which is listed below + + This license covers the TurboJPEG API library and associated programs, as + well as the build system. + +- The [zlib License](https://opensource.org/licenses/Zlib) + + This license is a subset of the other two, and it covers the libjpeg-turbo + SIMD extensions. 
+ + +Complying with the libjpeg-turbo Licenses +========================================= + +This section provides a roll-up of the libjpeg-turbo licensing terms, to the +best of our understanding. + +1. If you are distributing a modified version of the libjpeg-turbo source, + then: + + 1. You cannot alter or remove any existing copyright or license notices + from the source. + + **Origin** + - Clause 1 of the IJG License + - Clause 1 of the Modified BSD License + - Clauses 1 and 3 of the zlib License + + 2. You must add your own copyright notice to the header of each source + file you modified, so others can tell that you modified that file (if + there is not an existing copyright header in that file, then you can + simply add a notice stating that you modified the file.) + + **Origin** + - Clause 1 of the IJG License + - Clause 2 of the zlib License + + 3. You must include the IJG README file, and you must not alter any of the + copyright or license text in that file. + + **Origin** + - Clause 1 of the IJG License + +2. If you are distributing only libjpeg-turbo binaries without the source, or + if you are distributing an application that statically links with + libjpeg-turbo, then: + + 1. Your product documentation must include a message stating: + + This software is based in part on the work of the Independent JPEG + Group. + + **Origin** + - Clause 2 of the IJG license + + 2. If your binary distribution includes or uses the TurboJPEG API, then + your product documentation must include the text of the Modified BSD + License (see below.) + + **Origin** + - Clause 2 of the Modified BSD License + +3. You cannot use the name of the IJG or The libjpeg-turbo Project or the + contributors thereof in advertising, publicity, etc. + + **Origin** + - IJG License + - Clause 3 of the Modified BSD License + +4. The IJG and The libjpeg-turbo Project do not warrant libjpeg-turbo to be + free of defects, nor do we accept any liability for undesirable + consequences resulting from your use of the software. + + **Origin** + - IJG License + - Modified BSD License + - zlib License + + +The Modified (3-clause) BSD License +=================================== + +Copyright (C)2009-2022 D. R. Commander. All Rights Reserved.
+Copyright (C)2015 Viktor Szathmáry. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +- Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +- Neither the name of the libjpeg-turbo Project nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS", +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +Why Three Licenses? +=================== + +The zlib License could have been used instead of the Modified (3-clause) BSD +License, and since the IJG License effectively subsumes the distribution +conditions of the zlib License, this would have effectively placed +libjpeg-turbo binary distributions under the IJG License. However, the IJG +License specifically refers to the Independent JPEG Group and does not extend +attribution and endorsement protections to other entities. Thus, it was +desirable to choose a license that granted us the same protections for new code +that were granted to the IJG for code derived from their software. + +===== libwebp-1.2.4 ===== + +Copyright (c) 2010, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name of Google nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +===== lpng1638 ===== + +COPYRIGHT NOTICE, DISCLAIMER, and LICENSE +========================================= + +PNG Reference Library License version 2 +--------------------------------------- + + * Copyright (c) 1995-2022 The PNG Reference Library Authors. + * Copyright (c) 2018-2022 Cosmin Truta. + * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson. + * Copyright (c) 1996-1997 Andreas Dilger. + * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +The software is supplied "as is", without warranty of any kind, +express or implied, including, without limitation, the warranties +of merchantability, fitness for a particular purpose, title, and +non-infringement. In no event shall the Copyright owners, or +anyone distributing the software, be liable for any damages or +other liability, whether in contract, tort or otherwise, arising +from, out of, or in connection with the software, or the use or +other dealings in the software, even if advised of the possibility +of such damage. + +Permission is hereby granted to use, copy, modify, and distribute +this software, or portions hereof, for any purpose, without fee, +subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you + must not claim that you wrote the original software. If you + use this software in a product, an acknowledgment in the product + documentation would be appreciated, but is not required. + + 2. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + + +PNG Reference Library License version 1 (for libpng 0.5 through 1.6.35) +----------------------------------------------------------------------- + +libpng versions 1.0.7, July 1, 2000, through 1.6.35, July 15, 2018 are +Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson, are +derived from libpng-1.0.6, and are distributed according to the same +disclaimer and license as libpng-1.0.6 with the following individuals +added to the list of Contributing Authors: + + Simon-Pierre Cadieux + Eric S. Raymond + Mans Rullgard + Cosmin Truta + Gilles Vollant + James Yu + Mandar Sahastrabuddhe + Google Inc. + Vadim Barkov + +and with the following additions to the disclaimer: + + There is no warranty against interference with your enjoyment of + the library or against infringement. There is no warranty that our + efforts or the library will fulfill any of your particular purposes + or needs. This library is provided with all faults, and the entire + risk of satisfactory quality, performance, accuracy, and effort is + with the user. + +Some files in the "contrib" directory and some configure-generated +files that are distributed with libpng have other copyright owners, and +are released under other open source licenses. 
+ +libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are +Copyright (c) 1998-2000 Glenn Randers-Pehrson, are derived from +libpng-0.96, and are distributed according to the same disclaimer and +license as libpng-0.96, with the following individuals added to the +list of Contributing Authors: + + Tom Lane + Glenn Randers-Pehrson + Willem van Schaik + +libpng versions 0.89, June 1996, through 0.96, May 1997, are +Copyright (c) 1996-1997 Andreas Dilger, are derived from libpng-0.88, +and are distributed according to the same disclaimer and license as +libpng-0.88, with the following individuals added to the list of +Contributing Authors: + + John Bowler + Kevin Bracey + Sam Bushell + Magnus Holmgren + Greg Roelofs + Tom Tanner + +Some files in the "scripts" directory have other copyright owners, +but are released under this license. + +libpng versions 0.5, May 1995, through 0.88, January 1996, are +Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +For the purposes of this copyright and license, "Contributing Authors" +is defined as the following set of individuals: + + Andreas Dilger + Dave Martindale + Guy Eric Schalnat + Paul Schmidt + Tim Wegner + +The PNG Reference Library is supplied "AS IS". The Contributing +Authors and Group 42, Inc. disclaim all warranties, expressed or +implied, including, without limitation, the warranties of +merchantability and of fitness for any purpose. The Contributing +Authors and Group 42, Inc. assume no liability for direct, indirect, +incidental, special, exemplary, or consequential damages, which may +result from the use of the PNG Reference Library, even if advised of +the possibility of such damage. + +Permission is hereby granted to use, copy, modify, and distribute this +source code, or portions hereof, for any purpose, without fee, subject +to the following restrictions: + + 1. The origin of this source code must not be misrepresented. + + 2. Altered versions must be plainly marked as such and must not + be misrepresented as being the original source. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + +The Contributing Authors and Group 42, Inc. specifically permit, +without fee, and encourage the use of this source code as a component +to supporting the PNG file format in commercial products. If you use +this source code in a product, acknowledgment is not required but would +be appreciated. + +===== openjpeg-2.5.0 ===== + +/* + * The copyright in this software is being made available under the 2-clauses + * BSD License, included below. This software may be subject to other third + * party and contributor rights, including patent rights, and no such rights + * are granted under this license. + * + * Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium + * Copyright (c) 2002-2014, Professor Benoit Macq + * Copyright (c) 2003-2014, Antonin Descampe + * Copyright (c) 2003-2009, Francois-Olivier Devaux + * Copyright (c) 2005, Herve Drolon, FreeImage Team + * Copyright (c) 2002-2003, Yannick Verschueren + * Copyright (c) 2001-2003, David Janssens + * Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France + * Copyright (c) 2012, CS Systemes d'Information, France + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +===== tiff-4.4.0 ===== + +Copyright (c) 1988-1997 Sam Leffler +Copyright (c) 1991-1997 Silicon Graphics, Inc. + +Permission to use, copy, modify, distribute, and sell this software and +its documentation for any purpose is hereby granted without fee, provided +that (i) the above copyright notices and this permission notice appear in +all copies of the software and related documentation, and (ii) the names of +Sam Leffler and Silicon Graphics may not be used in any advertising or +publicity relating to the software without the specific, prior written +permission of Sam Leffler and Silicon Graphics. + +THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, +EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY +WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + +IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR +ANY SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF +LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +OF THIS SOFTWARE. + +===== xz-5.2.7 ===== + + +XZ Utils Licensing +================== + + Different licenses apply to different files in this package. Here + is a rough summary of which licenses apply to which parts of this + package (but check the individual files to be sure!): + + - liblzma is in the public domain. + + - xz, xzdec, and lzmadec command line tools are in the public + domain unless GNU getopt_long had to be compiled and linked + in from the lib directory. The getopt_long code is under + GNU LGPLv2.1+. + + - The scripts to grep, diff, and view compressed files have been + adapted from gzip. These scripts and their documentation are + under GNU GPLv2+. + + - All the documentation in the doc directory and most of the + XZ Utils specific documentation files in other directories + are in the public domain. + + - Translated messages are in the public domain. + + - The build system contains public domain files, and files that + are under GNU GPLv2+ or GNU GPLv3+. None of these files end up + in the binaries being built. + + - Test files and test code in the tests directory, and debugging + utilities in the debug directory are in the public domain. 
+ + - The extra directory may contain public domain files, and files + that are under various free software licenses. + + You can do whatever you want with the files that have been put into + the public domain. If you find public domain legally problematic, + take the previous sentence as a license grant. If you still find + the lack of copyright legally problematic, you have too many + lawyers. + + As usual, this software is provided "as is", without any warranty. + + If you copy significant amounts of public domain code from XZ Utils + into your project, acknowledging this somewhere in your software is + polite (especially if it is proprietary, non-free software), but + naturally it is not legally required. Here is an example of a good + notice to put into "about box" or into documentation: + + This software includes code from XZ Utils <https://tukaani.org/xz/>. + + The following license texts are included in the following files: + - COPYING.LGPLv2.1: GNU Lesser General Public License version 2.1 + - COPYING.GPLv2: GNU General Public License version 2 + - COPYING.GPLv3: GNU General Public License version 3 + + Note that the toolchain (compiler, linker etc.) may add some code + pieces that are copyrighted. Thus, it is possible that e.g. liblzma + binary wouldn't actually be in the public domain in its entirety + even though it contains no copyrighted code from the XZ Utils source + package. + + If you have questions, don't hesitate to ask the author(s) for more + information. + + +===== zlib-1.2.13 ===== + + (C) 1995-2022 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + +If you use the zlib library in a product, we would appreciate *not* receiving +lengthy legal documents to sign. The sources are provided for free but without +warranty of any kind. The library has been entirely written by Jean-loup +Gailly and Mark Adler; it does not include third-party code. We make all +contributions to and distributions of this project solely in our personal +capacity, and are not conveying any rights to any intellectual property of +any third parties. + +If you redistribute modified sources, we would appreciate that you include in +the file ChangeLog history information documenting your changes. Please read +the FAQ for more information on the distribution of modified source versions. 
diff --git a/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/METADATA b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/METADATA new file mode 100644 index 0000000..4a7e8bb --- /dev/null +++ b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/METADATA @@ -0,0 +1,171 @@ +Metadata-Version: 2.1 +Name: Pillow +Version: 9.3.0 +Summary: Python Imaging Library (Fork) +Home-page: https://python-pillow.org +Author: Alex Clark (PIL Fork Author) +Author-email: aclark@python-pillow.org +License: HPND +Project-URL: Documentation, https://pillow.readthedocs.io +Project-URL: Source, https://github.com/python-pillow/Pillow +Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi +Project-URL: Release notes, https://pillow.readthedocs.io/en/stable/releasenotes/index.html +Project-URL: Changelog, https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst +Project-URL: Twitter, https://twitter.com/PythonPillow +Keywords: Imaging +Classifier: Development Status :: 6 - Mature +Classifier: License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND) +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Classifier: Topic :: Multimedia :: Graphics :: Viewers +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: olefile ; extra == 'docs' +Requires-Dist: sphinx (>=2.4) ; extra == 'docs' +Requires-Dist: sphinx-copybutton ; extra == 'docs' +Requires-Dist: sphinx-issues (>=3.0.1) ; extra == 'docs' +Requires-Dist: sphinx-removed-in ; extra == 'docs' +Requires-Dist: sphinxext-opengraph ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: check-manifest ; extra == 'tests' +Requires-Dist: coverage ; extra == 'tests' +Requires-Dist: defusedxml ; extra == 'tests' +Requires-Dist: markdown2 ; extra == 'tests' +Requires-Dist: olefile ; extra == 'tests' +Requires-Dist: packaging ; extra == 'tests' +Requires-Dist: pyroma ; extra == 'tests' +Requires-Dist: pytest ; extra == 'tests' +Requires-Dist: pytest-cov ; extra == 'tests' +Requires-Dist: pytest-timeout ; extra == 'tests' + +

+ [Pillow logo] + +# Pillow + +## Python Imaging Library (Fork) + +Pillow is the friendly PIL fork by [Alex Clark and +Contributors](https://github.com/python-pillow/Pillow/graphs/contributors). +PIL is the Python Imaging Library by Fredrik Lundh and Contributors. +As of 2019, Pillow development is +[supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise). + +[Project status badge table: docs (Documentation Status); tests (GitHub Actions builds for Lint, Linux and macOS, Windows, MinGW, Cygwin, Docker; AppVeyor; wheels; Travis CI aarch64; code coverage; Tidelift Align); package (Zenodo, Tidelift, newest PyPI version, PyPI downloads, OpenSSF Best Practices); social (Gitter chat, Twitter)]
+ +## Overview + +The Python Imaging Library adds image processing capabilities to your Python interpreter. + +This library provides extensive file format support, an efficient internal representation, and fairly powerful image processing capabilities. + +The core image library is designed for fast access to data stored in a few basic pixel formats. It should provide a solid foundation for a general image processing tool. + +## More Information + +- [Documentation](https://pillow.readthedocs.io/) + - [Installation](https://pillow.readthedocs.io/en/latest/installation.html) + - [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html) +- [Contribute](https://github.com/python-pillow/Pillow/blob/main/.github/CONTRIBUTING.md) + - [Issues](https://github.com/python-pillow/Pillow/issues) + - [Pull requests](https://github.com/python-pillow/Pillow/pulls) +- [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html) +- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) + - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork) + +## Report a Vulnerability + +To report a security vulnerability, please follow the procedure described in the [Tidelift security policy](https://tidelift.com/docs/security). diff --git a/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/RECORD b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/RECORD new file mode 100644 index 0000000..bd6bdf5 --- /dev/null +++ b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/RECORD @@ -0,0 +1,203 @@ +PIL/BdfFontFile.py,sha256=ft6ACl7gHSxvC0fHg_LeIlSLqigQfyVXsx1fTR1ZtdQ,2927 +PIL/BlpImagePlugin.py,sha256=qHl5rqoiksRRZsGPquIKmUv0S_L89MY7A9g8qqK7Ewg,16246 +PIL/BmpImagePlugin.py,sha256=0HJgDXAiwiwxsoQPzjxezGDv-AVfPI_OithBCuWYpj0,17923 +PIL/BufrStubImagePlugin.py,sha256=xNLOLfTtUsGo_liBkCWaRdtZmdqOR42jklo9BsOt9yg,1591 +PIL/ContainerIO.py,sha256=NmRN9naqGy3KuR9nup6o1ihJRStbBHk-vyNpJb6oQF8,3003 +PIL/CurImagePlugin.py,sha256=TNR_IlHMR7wY8diTnBvipjVEGoiAPT_6c8Vj7sgxeNU,1754 +PIL/DcxImagePlugin.py,sha256=rFMRcOdlc22aFzdSF_ZsMGtQk3ShxjwDqXfLPSpIPls,2018 +PIL/DdsImagePlugin.py,sha256=cAkIklPIpulcm8ZlW6d_r_O9JSihdYKoV-3klS6bI9I,8999 +PIL/EpsImagePlugin.py,sha256=5T2Nq_P31SGNPirxSmw3F_3caycbczTpSBOpxCrXbsM,12470 +PIL/ExifTags.py,sha256=5RasI_fkXeW0HnOl5MAWTOBdTWfNdIcQgC4wScEoGNY,9355 +PIL/FitsImagePlugin.py,sha256=9d3y25DIDatx1ITt5FLDnSY3KCpI5FmdAVDpEzk5tBI,2042 +PIL/FitsStubImagePlugin.py,sha256=tOHyZfrOT3qj2bqEYtMooMYG5h5q3U-YPUsOntL3ZBc,1736 +PIL/FliImagePlugin.py,sha256=6XGD7AMh5xilCuggOCQo1cH6LgRICy6RqyRV8jdk8hM,4572 +PIL/FontFile.py,sha256=v6gK9QswTk6ylNlR20AF8zLK4agY7h75aaGqvigVQBc,2876 +PIL/FpxImagePlugin.py,sha256=UYigfH4bLKN9McN7d300kAxNHmHxWsW56ss-JAJCTcw,6942 +PIL/FtexImagePlugin.py,sha256=sf_GyQwRc7UZzWl4SG_fZs9qP5ovSvSUbTXyQZhIOkM,3919 +PIL/GbrImagePlugin.py,sha256=iu-0bW7lzaXdQDauGBEe8JzJXjQQ64Avw_sDZRNP1Os,2893 +PIL/GdImageFile.py,sha256=2_MSR2gFaAlI3C43jt0V_pnwO69vQqvarM8jRHqbIRY,2645 +PIL/GifImagePlugin.py,sha256=Xy0jU67qz56KxFaFCyguYYNIjrvsOCyIJZEmUABrmtE,36611 +PIL/GimpGradientFile.py,sha256=_d696RwCHZ3b1SXGmiAHj6wKIEvwkl28QhFMhaAwFas,3493 +PIL/GimpPaletteFile.py,sha256=nGPjhLwEQOSTIuqJJdaiSrMAQEWKt6PborW7XIcJSF8,1330 +PIL/GribStubImagePlugin.py,sha256=2JZxiDVNvapSWCzCPTsXg961EtfCncRh9PuzXNzkyEg,1585 +PIL/Hdf5StubImagePlugin.py,sha256=FLkkm2ZhA6nM5u984ILkTo2W09lq9NHTQZK-yCooZ9E,1588 +PIL/IcnsImagePlugin.py,sha256=4d-ij37S6wt1iyuhzwhUnu6QpQ-BnoHXO-8S-Jkcncg,12147 
+PIL/IcoImagePlugin.py,sha256=a8R8-qeb1DKFEhWf9n4-QHEwcVBBigwYA0cRSwr8Opk,11906 +PIL/ImImagePlugin.py,sha256=IMMTeceooYAGRXUYPTciYmAnQIzxmTx9ytcVkoSHsgY,11123 +PIL/Image.py,sha256=bENgO40jNhtYNjNu2J6w-CJ50KJohfTGfp-1jqRHSrI,131946 +PIL/ImageChops.py,sha256=y4xHGOzxaz3-QADLXkpjMt929g4DSi2Pf3ceYAtFSbE,7657 +PIL/ImageCms.py,sha256=VSi39rgc71snXtZVh7eR5cfZ6Zy7Ab0pfjdg-9gTRLY,38585 +PIL/ImageColor.py,sha256=y0jaeL2wOsXxw4J4IQou28YT3Gq2KbwMhB2WBf77bUo,9063 +PIL/ImageDraw.py,sha256=ybi69QgefRwIU5hq5pola_CTYLZ9Ob6aNRf--_iEXtk,37684 +PIL/ImageDraw2.py,sha256=qvNpBIQlq_TP3dyLNTb8rjB-QtY1rWbXq9z0TAaVYZo,6210 +PIL/ImageEnhance.py,sha256=tHMwYy_knQ_tice_c5MzDShwkFVL5DyJ1EVm1KHiDDI,3293 +PIL/ImageFile.py,sha256=EQYNe2Obpx42gf8XKweUsnY_LDK8LrkP-bu0rOk8r3Q,23745 +PIL/ImageFilter.py,sha256=WGdEzYl5VH-BmJx5hM_kyksrQYHA6KQli7j-gtD8R40,16709 +PIL/ImageFont.py,sha256=NP5fVmByDVvebyeU2jGBG_FdYADNn5gJ7I6GapCeezo,50932 +PIL/ImageGrab.py,sha256=pOKxsvnuV7ZOrDbMsajDa9_4Y_g-2xkJDGa8WimpUWY,4357 +PIL/ImageMath.py,sha256=szLcW4EZMHcJGa3P5dAEnKmPxsGk3GrXiFgJUMqnTIM,7512 +PIL/ImageMode.py,sha256=QIO9TR7zlbU0pYMUY3rVn-lOVKPrXismjmh2hX0DOrI,3097 +PIL/ImageMorph.py,sha256=I6D_DDIQNvmINxvXkgSN90_2wDTi01ICS9UE1ApYx1g,8016 +PIL/ImageOps.py,sha256=OO1VWnhP7oNPpnekwP_YZrv3pQBnxq-9Yp5AROD1F_k,21407 +PIL/ImagePalette.py,sha256=6qIZGwJS3OqrGIFQbM_j2ks5UtuWpnxlfL7n-wczcmU,8269 +PIL/ImagePath.py,sha256=IZ7vxarm_tI2DuV7QPchZe1P4U52ypHbO-v3JkcGm44,355 +PIL/ImageQt.py,sha256=JDcS_vQ8mgz5SZGWcN8fau2YmPlN-r3oDapMFdEQOcM,7100 +PIL/ImageSequence.py,sha256=IisJzYgqySUR9fZFDPSaz2DrXMIvZSxSBv7QI6sqqEQ,1925 +PIL/ImageShow.py,sha256=vudo3N7KUnK0_rrF_kaBgorCv8kne21WiGr30lGXHPw,11617 +PIL/ImageStat.py,sha256=wYyP2mnRMJ_L7RIKUb1BfeznjwMqAzh1MPgMvLnFqPA,4049 +PIL/ImageTk.py,sha256=to1JotNafxy355ZYjK5lRpT1Yma0Yh561OW5egzdqM0,8973 +PIL/ImageTransform.py,sha256=EsgO8FV2Gnm1hBMVx-8i7I3bhehRfMlwHIsV7QQ7FjM,2985 +PIL/ImageWin.py,sha256=qklIa-nlezkM_BUVRaIOgjSqRDWjQq8Qxe_r3sQy3Ro,7421 +PIL/ImtImagePlugin.py,sha256=NyPvO-oFnZR54EphM5gIB-MfGYVfr5NV8mkRy2frsSU,2667 +PIL/IptcImagePlugin.py,sha256=mwIZtFBfMgrQqhDg4OQA5b7_PgXAkhdpCX0ToRgXuvk,5944 +PIL/Jpeg2KImagePlugin.py,sha256=cps__e5lwn_UVN6B79SxY-wcUmZ6kncNBE-awzLFxSE,10813 +PIL/JpegImagePlugin.py,sha256=ca2Xw5E_Wr-UjIS0t6np6oX7ljJEcBX237cXtchCeWk,29333 +PIL/JpegPresets.py,sha256=7lEumxxIdQrdc4Eync4R8IItvu7WyP6KKY9vaR3RHfY,12583 +PIL/McIdasImagePlugin.py,sha256=gWdASbeTJZvScvE7TLSEvI7qQNlHSix8HHd_0vk5IBY,1829 +PIL/MicImagePlugin.py,sha256=P2oYuYSU336JZM-6AiixP67-XJrPWYDnhIu7ItcLu94,2471 +PIL/MpegImagePlugin.py,sha256=ML-YphCT794W-bZpWjKBAssJTMYEOenXn6-nwRp92oc,1886 +PIL/MpoImagePlugin.py,sha256=VwjJNb9yqbdY3zFanu1FlvW0P2Qv3bqQBTkLAWVdeik,6054 +PIL/MspImagePlugin.py,sha256=g-B74DSmaISA-UWybavdLjJGpRkb3W5tBQ6jOW5MqkU,5721 +PIL/PSDraw.py,sha256=rxUa015jdjOMgQFsIi8DPaHxCNaBN-l0FIWMB03Let0,6754 +PIL/PaletteFile.py,sha256=It7KEEqGCtBh6rSsNd48tqoyxyZBVu7KWerPU-PEqwc,1158 +PIL/PalmImagePlugin.py,sha256=UTEyYM6gA0k91ZkTRRdWS8pIrLft3ajKgXDi_KGX6-w,9335 +PIL/PcdImagePlugin.py,sha256=D8pFFz5wLn0RMcv5n_7H3-OuawsoK_r7USMAi-_nmeg,1539 +PIL/PcfFontFile.py,sha256=Eo-zZz09mKjDJkcB_g5vsT63Zn9h_5-5wBt7F7LHZZ0,6609 +PIL/PcxImagePlugin.py,sha256=H19vaKBFt2Jj_zoDo02S0ESJHdbVkgpGzvqkfx-1cpc,6160 +PIL/PdfImagePlugin.py,sha256=disaNfoTqYWdWhjuNwg2H7ZxK8d0pS7wG8-wOp87toU,8767 +PIL/PdfParser.py,sha256=uCTxZAZ5g88xWpt-GMs5mUKq-Ci1JXaYo6TUu58yybs,35545 +PIL/PixarImagePlugin.py,sha256=2Q68eTeBfQbN4l3yrB2NChlCW_AqeUELGct9m0f1ueA,1701 +PIL/PngImagePlugin.py,sha256=nhREfcJpVtgjPDtPalrNOwEIgiZK1WDNU0MaeF60X94,47445 
+PIL/PpmImagePlugin.py,sha256=a2KdHbsXYMZ6z9hrYNOCFoCWJxJfDJo5mfoQ2DzQNJk,11552 +PIL/PsdImagePlugin.py,sha256=cIUzYhiV-o3w_WBRP2wvTT8Sf3K12DKXDMkobQcLvCg,7769 +PIL/PyAccess.py,sha256=Yl6jc8kx10A1ImofadL4VzG6g7UGodFSJxqVDPcVv4M,10189 +PIL/SgiImagePlugin.py,sha256=TemnZadWg9c0DkCRAfQtY8OOdh1jiMuvC9GOfJdHZhk,6334 +PIL/SpiderImagePlugin.py,sha256=CBIuVa-gY7wfWFz0CHUxTMD4k01J7c25YYMYCvciKGA,9656 +PIL/SunImagePlugin.py,sha256=7K1T5U4TguGJXnOx-kcJxiC7408Z_uG3WmENT6j1ZUE,4418 +PIL/TarIO.py,sha256=azx_NJqCITW6WtPWDZDJBbmLxcKsdSNm33jvzTiBSXc,1505 +PIL/TgaImagePlugin.py,sha256=zoo-oWLlzm4GepMznP9Ste8iXjSWFmTA-ddIxtopqbo,6748 +PIL/TiffImagePlugin.py,sha256=MEOgwklhNFI01ebB6wMh82Q_FFy896PhX8FEQOJWUnk,79188 +PIL/TiffTags.py,sha256=eG05wgFMflE3gWWi0zKEhIfiQcM-5keS-kAfroftrm0,17327 +PIL/WalImageFile.py,sha256=QfSfJsERW370SGNcxhbHR7bv34kGTWWXz95uUoxOdxk,5644 +PIL/WebPImagePlugin.py,sha256=bfoL36cpEsnMtcTtR7WV_P_8g-1FFbaHwB-_RNnkwKo,11319 +PIL/WmfImagePlugin.py,sha256=VIVkwQJJq5h7KxJgUb_Vcon1e3SF-sFYn4qXOL9AuM8,4802 +PIL/XVThumbImagePlugin.py,sha256=1m1C33oWTeHnJe547CAVLVHaCfkbGDBX_H87diMx7dQ,2018 +PIL/XbmImagePlugin.py,sha256=1DAttuFon4ugdLU10CYJ4VoGiEQqnjwovN8VefbnGsw,2545 +PIL/XpmImagePlugin.py,sha256=A-owT2obrue0KhOcV36j_tv4yBsjhN_AZoRqcq5HTN4,3191 +PIL/__init__.py,sha256=oKJhS2T2H8DqnXPmBS_dXOuJafg-xNaBM4jpEWRl4hk,1843 +PIL/__main__.py,sha256=hOw0dx7KqDFGy9lxphlkL6NmaCbj8lp294vXH4n35ko,44 +PIL/__pycache__/BdfFontFile.cpython-310.pyc,, +PIL/__pycache__/BlpImagePlugin.cpython-310.pyc,, +PIL/__pycache__/BmpImagePlugin.cpython-310.pyc,, +PIL/__pycache__/BufrStubImagePlugin.cpython-310.pyc,, +PIL/__pycache__/ContainerIO.cpython-310.pyc,, +PIL/__pycache__/CurImagePlugin.cpython-310.pyc,, +PIL/__pycache__/DcxImagePlugin.cpython-310.pyc,, +PIL/__pycache__/DdsImagePlugin.cpython-310.pyc,, +PIL/__pycache__/EpsImagePlugin.cpython-310.pyc,, +PIL/__pycache__/ExifTags.cpython-310.pyc,, +PIL/__pycache__/FitsImagePlugin.cpython-310.pyc,, +PIL/__pycache__/FitsStubImagePlugin.cpython-310.pyc,, +PIL/__pycache__/FliImagePlugin.cpython-310.pyc,, +PIL/__pycache__/FontFile.cpython-310.pyc,, +PIL/__pycache__/FpxImagePlugin.cpython-310.pyc,, +PIL/__pycache__/FtexImagePlugin.cpython-310.pyc,, +PIL/__pycache__/GbrImagePlugin.cpython-310.pyc,, +PIL/__pycache__/GdImageFile.cpython-310.pyc,, +PIL/__pycache__/GifImagePlugin.cpython-310.pyc,, +PIL/__pycache__/GimpGradientFile.cpython-310.pyc,, +PIL/__pycache__/GimpPaletteFile.cpython-310.pyc,, +PIL/__pycache__/GribStubImagePlugin.cpython-310.pyc,, +PIL/__pycache__/Hdf5StubImagePlugin.cpython-310.pyc,, +PIL/__pycache__/IcnsImagePlugin.cpython-310.pyc,, +PIL/__pycache__/IcoImagePlugin.cpython-310.pyc,, +PIL/__pycache__/ImImagePlugin.cpython-310.pyc,, +PIL/__pycache__/Image.cpython-310.pyc,, +PIL/__pycache__/ImageChops.cpython-310.pyc,, +PIL/__pycache__/ImageCms.cpython-310.pyc,, +PIL/__pycache__/ImageColor.cpython-310.pyc,, +PIL/__pycache__/ImageDraw.cpython-310.pyc,, +PIL/__pycache__/ImageDraw2.cpython-310.pyc,, +PIL/__pycache__/ImageEnhance.cpython-310.pyc,, +PIL/__pycache__/ImageFile.cpython-310.pyc,, +PIL/__pycache__/ImageFilter.cpython-310.pyc,, +PIL/__pycache__/ImageFont.cpython-310.pyc,, +PIL/__pycache__/ImageGrab.cpython-310.pyc,, +PIL/__pycache__/ImageMath.cpython-310.pyc,, +PIL/__pycache__/ImageMode.cpython-310.pyc,, +PIL/__pycache__/ImageMorph.cpython-310.pyc,, +PIL/__pycache__/ImageOps.cpython-310.pyc,, +PIL/__pycache__/ImagePalette.cpython-310.pyc,, +PIL/__pycache__/ImagePath.cpython-310.pyc,, +PIL/__pycache__/ImageQt.cpython-310.pyc,, 
+PIL/__pycache__/ImageSequence.cpython-310.pyc,, +PIL/__pycache__/ImageShow.cpython-310.pyc,, +PIL/__pycache__/ImageStat.cpython-310.pyc,, +PIL/__pycache__/ImageTk.cpython-310.pyc,, +PIL/__pycache__/ImageTransform.cpython-310.pyc,, +PIL/__pycache__/ImageWin.cpython-310.pyc,, +PIL/__pycache__/ImtImagePlugin.cpython-310.pyc,, +PIL/__pycache__/IptcImagePlugin.cpython-310.pyc,, +PIL/__pycache__/Jpeg2KImagePlugin.cpython-310.pyc,, +PIL/__pycache__/JpegImagePlugin.cpython-310.pyc,, +PIL/__pycache__/JpegPresets.cpython-310.pyc,, +PIL/__pycache__/McIdasImagePlugin.cpython-310.pyc,, +PIL/__pycache__/MicImagePlugin.cpython-310.pyc,, +PIL/__pycache__/MpegImagePlugin.cpython-310.pyc,, +PIL/__pycache__/MpoImagePlugin.cpython-310.pyc,, +PIL/__pycache__/MspImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PSDraw.cpython-310.pyc,, +PIL/__pycache__/PaletteFile.cpython-310.pyc,, +PIL/__pycache__/PalmImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PcdImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PcfFontFile.cpython-310.pyc,, +PIL/__pycache__/PcxImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PdfImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PdfParser.cpython-310.pyc,, +PIL/__pycache__/PixarImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PngImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PpmImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PsdImagePlugin.cpython-310.pyc,, +PIL/__pycache__/PyAccess.cpython-310.pyc,, +PIL/__pycache__/SgiImagePlugin.cpython-310.pyc,, +PIL/__pycache__/SpiderImagePlugin.cpython-310.pyc,, +PIL/__pycache__/SunImagePlugin.cpython-310.pyc,, +PIL/__pycache__/TarIO.cpython-310.pyc,, +PIL/__pycache__/TgaImagePlugin.cpython-310.pyc,, +PIL/__pycache__/TiffImagePlugin.cpython-310.pyc,, +PIL/__pycache__/TiffTags.cpython-310.pyc,, +PIL/__pycache__/WalImageFile.cpython-310.pyc,, +PIL/__pycache__/WebPImagePlugin.cpython-310.pyc,, +PIL/__pycache__/WmfImagePlugin.cpython-310.pyc,, +PIL/__pycache__/XVThumbImagePlugin.cpython-310.pyc,, +PIL/__pycache__/XbmImagePlugin.cpython-310.pyc,, +PIL/__pycache__/XpmImagePlugin.cpython-310.pyc,, +PIL/__pycache__/__init__.cpython-310.pyc,, +PIL/__pycache__/__main__.cpython-310.pyc,, +PIL/__pycache__/_binary.cpython-310.pyc,, +PIL/__pycache__/_deprecate.cpython-310.pyc,, +PIL/__pycache__/_tkinter_finder.cpython-310.pyc,, +PIL/__pycache__/_util.cpython-310.pyc,, +PIL/__pycache__/_version.cpython-310.pyc,, +PIL/__pycache__/features.cpython-310.pyc,, +PIL/_binary.py,sha256=Ts2HKoKEMc9N4DsgIYTmJM_ecjKsexxJhsL6zR0tmuQ,2145 +PIL/_deprecate.py,sha256=nqEbnvvgBkeTzPfzlHYDvg9NxsQKbf7O-Q4JDtzjLO4,1940 +PIL/_imaging.cp310-win_amd64.pyd,sha256=PFO7xZex7nXRcjU8wOynBmZdBmZHL7YsjRk3-KFQi6g,2371072 +PIL/_imagingcms.cp310-win_amd64.pyd,sha256=mFcZknSN6f3vB1gKm1RXkRQAhTpdUVK00U2WXdVchVE,254464 +PIL/_imagingft.cp310-win_amd64.pyd,sha256=ad8KWm6zsOjEuXIsOhzUasQAVWujZt6ZjA-c_ghjNgg,1649664 +PIL/_imagingmath.cp310-win_amd64.pyd,sha256=JCoDoMI5_5hU_OddyzREpsdryoxyu1Mi7N_XZ37MvjY,24064 +PIL/_imagingmorph.cp310-win_amd64.pyd,sha256=ThJvw_ZSmwj3FSoNina7gpoAQQJdovnzWVzZKKKneGw,13312 +PIL/_imagingtk.cp310-win_amd64.pyd,sha256=cIJPYngi7Inmy386HR738AUh-nLdVBceQzpzD_gaJ98,14848 +PIL/_tkinter_finder.py,sha256=yTJvJBNR61MpDzWmKu_3C86QbtPIqacJkA23LGOlh0g,691 +PIL/_util.py,sha256=sX8hjjr5oCOQNLChyFM7lP5ZaKIpISa4Sc0vuclsR-4,388 +PIL/_version.py,sha256=EysGfU6KK_GeMzHdWqjQautdlfvMbJz2-zTCM2S_Y7Y,52 +PIL/_webp.cp310-win_amd64.pyd,sha256=etjtiR3VgY_4qiQl58LdsKrmxMi0JV6GCnDInW4YyHc,529920 +PIL/features.py,sha256=f9eCDiwFttGMs1p18jOM-6gogq-hW0paY8KViTSJujI,9707 
+Pillow-9.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Pillow-9.3.0.dist-info/LICENSE,sha256=ArdgtDZIYDuj_TBEgy6XCcs4KYmE1C5qlXkBxZXaX98,55581 +Pillow-9.3.0.dist-info/METADATA,sha256=OikEqsc1kwkT7CmDu0k-iBgUt4PWQi5Zoeq3rC-Nazo,9222 +Pillow-9.3.0.dist-info/RECORD,, +Pillow-9.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Pillow-9.3.0.dist-info/WHEEL,sha256=W26pYN7HLsBT1jrDSL9udgf_mdNKJmYmL23sIP-FcgM,102 +Pillow-9.3.0.dist-info/top_level.txt,sha256=riZqrk-hyZqh5f1Z0Zwii3dKfxEsByhu9cU9IODF-NY,4 +Pillow-9.3.0.dist-info/zip-safe,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2 diff --git a/sbsheriff/Lib/site-packages/pip-22.2.2.dist-info/REQUESTED b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/REQUESTED similarity index 100% rename from sbsheriff/Lib/site-packages/pip-22.2.2.dist-info/REQUESTED rename to sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/REQUESTED diff --git a/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/WHEEL b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/WHEEL new file mode 100644 index 0000000..93f1ef4 --- /dev/null +++ b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp310-cp310-win_amd64 + diff --git a/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/top_level.txt b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..b338169 --- /dev/null +++ b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +PIL diff --git a/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/zip-safe b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/sbsheriff/Lib/site-packages/Pillow-9.3.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/sbsheriff/Lib/site-packages/pip-22.2.2.dist-info/INSTALLER b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER similarity index 100% rename from sbsheriff/Lib/site-packages/pip-22.2.2.dist-info/INSTALLER rename to sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER diff --git a/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/LICENSE b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/LICENSE new file mode 100644 index 0000000..91e18a6 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/LICENSE @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/METADATA b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/METADATA new file mode 100644 index 0000000..1de4243 --- /dev/null +++ b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/METADATA @@ -0,0 +1,246 @@ +Metadata-Version: 2.1 +Name: PyNaCl +Version: 1.5.0 +Summary: Python binding to the Networking and Cryptography (NaCl) library +Home-page: https://github.com/pyca/pynacl/ +Author: The PyNaCl developers +Author-email: cryptography-dev@python.org +License: Apache License 2.0 +Platform: UNKNOWN +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6 +License-File: LICENSE +Requires-Dist: cffi (>=1.4.1) +Provides-Extra: docs +Requires-Dist: sphinx (>=1.6.5) ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests' +Requires-Dist: hypothesis (>=3.27.0) ; extra == 'tests' + +=============================================== +PyNaCl: Python binding to the libsodium library +=============================================== + +.. 
image:: https://img.shields.io/pypi/v/pynacl.svg + :target: https://pypi.org/project/PyNaCl/ + :alt: Latest Version + +.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=main + :target: https://codecov.io/github/pyca/pynacl?branch=main + +.. image:: https://img.shields.io/pypi/pyversions/pynacl.svg + :target: https://pypi.org/project/PyNaCl/ + :alt: Compatible Python Versions + +PyNaCl is a Python binding to `libsodium`_, which is a fork of the +`Networking and Cryptography library`_. These libraries have a stated goal of +improving usability, security and speed. It supports Python 3.6+ as well as +PyPy 3. + +.. _libsodium: https://github.com/jedisct1/libsodium +.. _Networking and Cryptography library: https://nacl.cr.yp.to/ + +Features +-------- + +* Digital signatures +* Secret-key encryption (a short ``SecretBox`` sketch appears after the vendored file listings below) +* Public-key encryption +* Hashing and message authentication +* Password-based key derivation and password hashing + +`Changelog`_ +------------ + +.. _Changelog: https://pynacl.readthedocs.io/en/stable/changelog/ + +Installation +============ + +Binary wheel install +-------------------- + +PyNaCl ships as a binary wheel on macOS, Windows and Linux ``manylinux1`` [#many]_ , +so all dependencies are included. Make sure you have an up-to-date pip +and run: + +.. code-block:: console + + $ pip install pynacl + +Faster wheel build +------------------ + +You can define the environment variable ``LIBSODIUM_MAKE_ARGS`` to pass arguments to ``make`` +and enable `parallelization`_: + +.. code-block:: console + + $ LIBSODIUM_MAKE_ARGS=-j4 pip install pynacl + +Linux source build +------------------ + +PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled +with PyNaCl, so to install you can run: + +.. code-block:: console + + $ pip install pynacl + +If you'd prefer to use the version of ``libsodium`` provided by your +distribution, you can disable the bundled copy during install by running: + +.. code-block:: console + + $ SODIUM_INSTALL=system pip install pynacl + +.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools + is generally discouraged, and is completely unsupported in PyNaCl's case. + +.. _parallelization: https://www.gnu.org/software/make/manual/html_node/Parallel.html + +.. _libsodium: https://github.com/jedisct1/libsodium + +.. [#many] `manylinux1 wheels <https://www.python.org/dev/peps/pep-0513/>`_ + are built on a baseline Linux environment based on CentOS 5.11 + and should work on most x86 and x86_64 glibc-based Linux environments. + +Changelog +========= + +1.5.0 (2022-01-07) +------------------ + +* **BACKWARDS INCOMPATIBLE:** Removed support for Python 2.7 and Python 3.5. +* **BACKWARDS INCOMPATIBLE:** We no longer distribute ``manylinux1`` + wheels. +* Added ``manylinux2014``, ``manylinux_2_24``, ``musllinux``, and macOS + ``universal2`` wheels (the latter supports macOS ``arm64``). +* Update ``libsodium`` to 1.0.18-stable (July 25, 2021 release). +* Add inline type hints. + +1.4.0 (2020-05-25) +------------------ + +* Update ``libsodium`` to 1.0.18. +* **BACKWARDS INCOMPATIBLE:** We no longer distribute 32-bit ``manylinux1`` + wheels. Continuing to produce them was a maintenance burden. +* Added support for Python 3.8, and removed support for Python 3.4. +* Add low level bindings for extracting the seed and the public key + from a crypto_sign_ed25519 secret key. +* Add low level bindings for deterministic random generation.
+* Add ``wheel`` and ``setuptools`` setup_requirements in ``setup.py`` (#485) +* Fix checks on very slow builders (#481, #495) +* Add low-level bindings to ed25519 arithmetic functions +* Update low-level blake2b state implementation +* Fix wrong short-input behavior of SealedBox.decrypt() (#517) +* Raise CryptPrefixError exception instead of InvalidkeyError when trying + to check a password against a verifier stored in an unknown format (#519) +* Add support for minimal builds of libsodium. Trying to call functions + not available in a minimal build will raise an UnavailableError + exception. To compile a minimal build of the bundled libsodium, set + the SODIUM_INSTALL_MINIMAL environment variable to any non-empty + string (e.g. ``SODIUM_INSTALL_MINIMAL=1``) for setup. + +1.3.0 - 2018-09-26 +------------------ + +* Added support for Python 3.7. +* Update ``libsodium`` to 1.0.16. +* Run and test all code examples in PyNaCl docs through sphinx's + doctest builder. +* Add low-level bindings for chacha20-poly1305 AEAD constructions. +* Add low-level bindings for the chacha20-poly1305 secretstream constructions. +* Add low-level bindings for ed25519ph pre-hashed signing construction. +* Add low-level bindings for constant-time increment and addition + on fixed-precision big integers represented as little-endian + byte sequences. +* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API. +* Add low-level bindings for libsodium's crypto_kx... key exchange + construction. +* Set hypothesis deadline to None in tests/test_pwhash.py to avoid + incorrect test failures on slower processor architectures (GitHub + issue #370). + +1.2.1 - 2017-12-04 +------------------ + +* Update hypothesis minimum allowed version. +* Infrastructure: add proper configuration for readthedocs builder + runtime environment. + +1.2.0 - 2017-11-01 +------------------ + +* Update ``libsodium`` to 1.0.15. +* Infrastructure: add Jenkins support for automatic builds of + ``manylinux1`` binary wheels. +* Added support for ``SealedBox`` construction. +* Added support for ``argon2i`` and ``argon2id`` password hashing constructs + and restructured high-level password hashing implementation to expose + the same interface for all hashers. +* Added support for the 128-bit ``siphashx24`` variant of ``siphash24``. +* Added support for ``from_seed`` APIs for X25519 keypair generation. +* Dropped support for Python 3.3. + +1.1.2 - 2017-03-31 +------------------ + +* Reorder the link-time library search path when using the bundled + libsodium. + +1.1.1 - 2017-03-15 +------------------ + +* Fixed a circular import bug in ``nacl.utils``. + +1.1.0 - 2017-03-14 +------------------ + +* Dropped support for Python 2.6. +* Added ``shared_key()`` method on ``Box``. +* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or + ``SecretBox`` and it will automatically generate a random nonce. +* Added support for ``siphash24``. +* Added support for ``blake2b``. +* Added support for ``scrypt``. +* Update ``libsodium`` to 1.0.11. +* Default to the bundled ``libsodium`` when compiling. +* All raised exceptions are defined by mixing in + ``nacl.exceptions.CryptoError``. + +1.0.1 - 2016-01-24 +------------------ + +* Fix an issue with absolute paths that prevented the creation of wheels. + +1.0 - 2016-01-23 +---------------- + +* PyNaCl has been ported to use the new APIs available in cffi 1.0+. + Due to this change we no longer support PyPy releases older than 2.6. +* Python 3.2 support has been dropped.
+* Functions to convert between Ed25519 and Curve25519 keys have been added. + +0.3.0 - 2015-03-04 +------------------ + +* The low-level API (`nacl.c.*`) has been changed to match the + upstream NaCl C/C++ conventions (as well as those of other NaCl bindings). + The order of arguments and return values has changed significantly. To + avoid silent failures, `nacl.c` has been removed, and replaced with + `nacl.bindings` (with the new argument ordering). If you have code which + calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review + the new docstrings and update your code/imports to match the new + conventions. + + diff --git a/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/RECORD b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/RECORD new file mode 100644 index 0000000..cffcd9e --- /dev/null +++ b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/RECORD @@ -0,0 +1,69 @@ +PyNaCl-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyNaCl-1.5.0.dist-info/LICENSE,sha256=d69bve2VkRS216XupRiyvjZOBPT0qV-eh9mHDCdxPSQ,9868 +PyNaCl-1.5.0.dist-info/METADATA,sha256=U9PLLkcKk_YC_Tl5OSoMiAmblalKU9qFsRtxiwa-TiM,8656 +PyNaCl-1.5.0.dist-info/RECORD,, +PyNaCl-1.5.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +PyNaCl-1.5.0.dist-info/WHEEL,sha256=nYCSW5p8tLyDU-wbqo3uRlCluAzwxLmyyRK2pVs4-Ag,100 +PyNaCl-1.5.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13 +nacl/__init__.py,sha256=W0JhVbFcnhlrEtK0tdRGDcX8aucbKCQmRBmf9hGFWQY,1155 +nacl/__pycache__/__init__.cpython-310.pyc,, +nacl/__pycache__/encoding.cpython-310.pyc,, +nacl/__pycache__/exceptions.cpython-310.pyc,, +nacl/__pycache__/hash.cpython-310.pyc,, +nacl/__pycache__/hashlib.cpython-310.pyc,, +nacl/__pycache__/public.cpython-310.pyc,, +nacl/__pycache__/secret.cpython-310.pyc,, +nacl/__pycache__/signing.cpython-310.pyc,, +nacl/__pycache__/utils.cpython-310.pyc,, +nacl/_sodium.pyd,sha256=1zPCPGpLIWJaT_B_ZWK6iCvL2w9QgmJpQZ2N4FdPiM0,348672 +nacl/bindings/__init__.py,sha256=erC7ta0z7XQSmfeYcnCVhFKqBtkkK6wJq8PKsEnjqSg,17448 +nacl/bindings/__pycache__/__init__.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_aead.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_box.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_core.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_generichash.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_hash.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_kx.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_pwhash.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_scalarmult.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_secretbox.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_secretstream.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_shorthash.cpython-310.pyc,, +nacl/bindings/__pycache__/crypto_sign.cpython-310.pyc,, +nacl/bindings/__pycache__/randombytes.cpython-310.pyc,, +nacl/bindings/__pycache__/sodium_core.cpython-310.pyc,, +nacl/bindings/__pycache__/utils.cpython-310.pyc,, +nacl/bindings/crypto_aead.py,sha256=fxSgpeI48HkxwsZEk9cTxigdJ5k7PvMh1mxr7M0LK9A,16156 +nacl/bindings/crypto_box.py,sha256=6M7qSZu806POBtEoK5e9Ui-JcSYaFDI6tTnllVJ0F7U,10463 +nacl/bindings/crypto_core.py,sha256=KccKcEGpoEsQjkYRcwpsBse-g_iGXNgQHeb1gCdZR4E,14148 +nacl/bindings/crypto_generichash.py,sha256=8nEG1ntZSqVOTAV6-_EZg-gNbV5UJc-SbDUa2GdkxRs,9133 +nacl/bindings/crypto_hash.py,sha256=8GO8el_rZgD2ITJyEbiejHXZimfrxckXc9cUppv1vpk,2238 
+nacl/bindings/crypto_kx.py,sha256=8gmFRbU7NKHqCLZ1HsObilmeTboYFW6tEJCEUucje9Q,6923 +nacl/bindings/crypto_pwhash.py,sha256=sIugQ9Rx8KTy3vnOsQwz_kV2PhqwqVOsfbkR_sKnudc,19448 +nacl/bindings/crypto_scalarmult.py,sha256=gMNDgWl-P6ua8I3kAM2yt4dNzK9gY742C424z-yBMUU,8484 +nacl/bindings/crypto_secretbox.py,sha256=WEX2_Ea79pQkUshr_sVlslA28Zx_66tYFawW1qipleY,3000 +nacl/bindings/crypto_secretstream.py,sha256=3uZN9XET8AeFKn365K2gfcZKYjkOFfyLr8GCUw8Wx6g,11522 +nacl/bindings/crypto_shorthash.py,sha256=NDmmvG77ZhGLmbK0USoLW2dOveUcGdUFqL1fi5ozcDc,2684 +nacl/bindings/crypto_sign.py,sha256=IMKXZCAb8oF0tLQR6UcpJk0zTRFDeW4IWNvo9XdZfgw,10669 +nacl/bindings/randombytes.py,sha256=JmR-o2Bpj1ssJp8GDj4KgutPknEZGgogUSdnW4kRI5Q,1614 +nacl/bindings/sodium_core.py,sha256=8o4mDDahmmAIf4Xk3hJE5C-B6Ms5mbeu64_ylW9kY6I,1072 +nacl/bindings/utils.py,sha256=2WefZr1MOFTWKn6h6ODuw4u8D9PEfDWMaayXiPUm6RA,4439 +nacl/encoding.py,sha256=xKwjFb5F1jEfmazetwtaQpsUPvzYLh3RU6Wkiq56NzI,3020 +nacl/exceptions.py,sha256=gM8gN01HFGCUw8L1VjmEhBPf00r-igNJo-nAEsrhEBU,2539 +nacl/hash.py,sha256=_H8JOPecwna4GP8CaMorghfetykbuO9q4oteek9bfVo,6574 +nacl/hashlib.py,sha256=x74Z29ExS3e5yFV0Mzij9gnU_eRKWwkD6YPYfI5eUFw,4543 +nacl/public.py,sha256=kVoz1R9zkNnczDUsBDnYYXL_6C9jsga6ph3rk32uFNo,15215 +nacl/pwhash/__init__.py,sha256=2vX9OivKZsrMVjh2vjfbdBVDsUml8AEnIRsa1d6ZzG0,2750 +nacl/pwhash/__pycache__/__init__.cpython-310.pyc,, +nacl/pwhash/__pycache__/_argon2.cpython-310.pyc,, +nacl/pwhash/__pycache__/argon2i.cpython-310.pyc,, +nacl/pwhash/__pycache__/argon2id.cpython-310.pyc,, +nacl/pwhash/__pycache__/scrypt.cpython-310.pyc,, +nacl/pwhash/_argon2.py,sha256=uR1Y_DnX8RZIfmTHborlXWcJ-re_wDryOIhVYssjj8I,1828 +nacl/pwhash/argon2i.py,sha256=dJmM_bVP0LALK9I4TfaZsWqEah7z7sprd3mw8TRPr-4,4537 +nacl/pwhash/argon2id.py,sha256=dJ7kpU-b07YzMPlrsMW9K_tV-xnA0bE7pj-ZFo-z0wc,4568 +nacl/pwhash/scrypt.py,sha256=_qbeDojJzzE42RvwwUsELA8pGgXDInb1ht6ec1w3_1Y,7197 +nacl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nacl/secret.py,sha256=YShCAvKF7sHAGOBRSnWKMJvJChQ7SuhlQXRCz32MdrM,12413 +nacl/signing.py,sha256=CR9ZRXYSrdZFkNiIQTIplFS8UQ0Z_OzXqaaQ_HdhSQ4,8587 +nacl/utils.py,sha256=iEQ0LtPtyV7Cqo4wpn9CvQunwsM5ShEZlO9IvT1r4dk,2429 diff --git a/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/REQUESTED b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/WHEEL b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/WHEEL new file mode 100644 index 0000000..0ba1b7b --- /dev/null +++ b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp36-abi3-win_amd64 + diff --git a/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt new file mode 100644 index 0000000..f52507f --- /dev/null +++ b/sbsheriff/Lib/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_sodium +nacl diff --git a/sbsheriff/Lib/site-packages/__pycache__/six.cpython-310.pyc b/sbsheriff/Lib/site-packages/__pycache__/six.cpython-310.pyc new file mode 100644 index 0000000..5adc06e Binary files /dev/null and b/sbsheriff/Lib/site-packages/__pycache__/six.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/__pycache__/win32_setctime.cpython-310.pyc b/sbsheriff/Lib/site-packages/__pycache__/win32_setctime.cpython-310.pyc new file mode 100644 index 0000000..2952450 Binary files /dev/null and 
b/sbsheriff/Lib/site-packages/__pycache__/win32_setctime.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/_cffi_backend.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/_cffi_backend.cp310-win_amd64.pyd new file mode 100644 index 0000000..b4cfe51 Binary files /dev/null and b/sbsheriff/Lib/site-packages/_cffi_backend.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/pip-22.2.2.dist-info/top_level.txt b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/INSTALLER similarity index 100% rename from sbsheriff/Lib/site-packages/pip-22.2.2.dist-info/top_level.txt rename to sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/INSTALLER diff --git a/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/LICENSE b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/LICENSE new file mode 100644 index 0000000..104eebf --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
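The PyNaCl metadata above lists secret-key encryption among the library's features, and its 1.1.0 changelog entry notes that ``SecretBox`` can generate a random nonce automatically. The following is a minimal, non-authoritative sketch of that feature; the key handling and message are illustrative only and are not part of the vendored files:

.. code-block:: python

    import nacl.secret
    import nacl.utils

    # A SecretBox key is SecretBox.KEY_SIZE (32) random bytes; keep it secret.
    key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)
    box = nacl.secret.SecretBox(key)

    # Omitting the nonce lets encrypt() pick a random one (see the 1.1.0
    # changelog entry above); it is bundled into the returned ciphertext.
    ciphertext = box.encrypt(b"attack at dawn")
    assert box.decrypt(ciphertext) == b"attack at dawn"

If the ciphertext is tampered with, ``decrypt()`` raises an exception deriving from ``nacl.exceptions.CryptoError``, consistent with the changelog note that all raised exceptions mix in ``CryptoError``.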
diff --git a/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/METADATA b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/METADATA new file mode 100644 index 0000000..c65388e --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 3.6.2 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Author: Alex Grönholm +Author-email: alex.gronholm@nextday.fi +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6.2 +License-File: LICENSE +Requires-Dist: idna (>=2.8) +Requires-Dist: sniffio (>=1.1) +Requires-Dist: contextvars ; python_version < "3.7" +Requires-Dist: dataclasses ; python_version < "3.7" +Requires-Dist: typing-extensions ; python_version < "3.8" +Provides-Extra: doc +Requires-Dist: packaging ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc' +Provides-Extra: test +Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test' +Requires-Dist: hypothesis (>=4.0) ; extra == 'test' +Requires-Dist: pytest (>=7.0) ; extra == 'test' +Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test' +Requires-Dist: trustme ; extra == 'test' +Requires-Dist: contextlib2 ; (python_version < "3.7") and extra == 'test' +Requires-Dist: uvloop (<0.15) ; (python_version < "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test' +Requires-Dist: uvloop (>=0.15) ; (python_version >= "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio (<0.22,>=0.16) ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio, and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. 
AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. _Hypothesis: https://hypothesis.works/ diff --git a/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/RECORD b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/RECORD new file mode 100644 index 0000000..423a914 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-3.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-3.6.2.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-3.6.2.dist-info/METADATA,sha256=30LcHBTF6U8AO5jpASPwKp9dqHL3t0MuCADwSLWMFrc,4660 +anyio-3.6.2.dist-info/RECORD,, +anyio-3.6.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +anyio-3.6.2.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-3.6.2.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=M2R8dk6L5gL5lXHArzpSfEn2oH5jMyUKhzyrkRiv2AM,4037 +anyio/__pycache__/__init__.cpython-310.pyc,, +anyio/__pycache__/from_thread.cpython-310.pyc,, +anyio/__pycache__/lowlevel.cpython-310.pyc,, +anyio/__pycache__/pytest_plugin.cpython-310.pyc,, +anyio/__pycache__/to_process.cpython-310.pyc,, +anyio/__pycache__/to_thread.cpython-310.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-310.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-310.pyc,, +anyio/_backends/__pycache__/_trio.cpython-310.pyc,, +anyio/_backends/_asyncio.py,sha256=ZJDvRwfS4wv9WWcqWledNJyl8hx8A8-m9-gSKAJ6nBM,69238 +anyio/_backends/_trio.py,sha256=CebCaqr8Szi6uCnUzwtBRLfUitR5OnDT_wfH-KiqvBQ,29696 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-310.pyc,, +anyio/_core/__pycache__/_compat.cpython-310.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-310.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-310.pyc,, +anyio/_core/__pycache__/_fileio.cpython-310.pyc,, +anyio/_core/__pycache__/_resources.cpython-310.pyc,, 
+anyio/_core/__pycache__/_signals.cpython-310.pyc,, +anyio/_core/__pycache__/_sockets.cpython-310.pyc,, +anyio/_core/__pycache__/_streams.cpython-310.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-310.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-310.pyc,, +anyio/_core/__pycache__/_tasks.cpython-310.pyc,, +anyio/_core/__pycache__/_testing.cpython-310.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-310.pyc,, +anyio/_core/_compat.py,sha256=X99W70r-O-JLdkKNtbddcIY5H2Nyg3Nk34oUYE9WZRs,5790 +anyio/_core/_eventloop.py,sha256=DRn_hy679LtsJFsPX7dXjDv72bLtSFkTnWY9WVVfgCQ,4108 +anyio/_core/_exceptions.py,sha256=1wqraNldZroYkoyB0HZStAruz_7yDCBaW-4zYwsKj8s,2904 +anyio/_core/_fileio.py,sha256=au82uZXZX4fia8EoZq_E-JDwZFKe6ZtI0J6IkxK8FmQ,18298 +anyio/_core/_resources.py,sha256=M_uN-90N8eSsWuvo-0xluWU_OG2BTyccAgsQ7XtHxzs,399 +anyio/_core/_signals.py,sha256=D4btJN527tAADspKBeNKaCds-ZcEZJP8LWM_MjVuQRA,827 +anyio/_core/_sockets.py,sha256=fW_Cbg6kfw4xgYuVuWbcWrAYspOcDSEjwxVATMzf2fo,19820 +anyio/_core/_streams.py,sha256=gjT5xChJ1OoV8nNinljSv1yW4nqUS-QzZzIydQz3exQ,1494 +anyio/_core/_subprocesses.py,sha256=pcchMI2OII0QSjiVxRiTEz4M0B7TlQPzGurfCuka-xc,5049 +anyio/_core/_synchronization.py,sha256=xOOG4hF9783N6E2IcD3YKiukguA5bPrj6BodDsKNaJY,16822 +anyio/_core/_tasks.py,sha256=ebGLjHvwL6I9aGyPwvCig1drebSVYFzvY3pnN3TsB4o,5273 +anyio/_core/_testing.py,sha256=VZka_yebIhJ6mJ6Vo_ilO3Nbz53ieqg0WBijwciMwdY,2196 +anyio/_core/_typedattr.py,sha256=k5-wBvMlDlKHIpn18INVnXAlGwI3CrAvPmWoceHjnOQ,2534 +anyio/abc/__init__.py,sha256=hMa47CMs5O1twC2bBcSbzwX-3Q08BAgAPTRekQobb3E,2123 +anyio/abc/__pycache__/__init__.cpython-310.pyc,, +anyio/abc/__pycache__/_resources.cpython-310.pyc,, +anyio/abc/__pycache__/_sockets.cpython-310.pyc,, +anyio/abc/__pycache__/_streams.cpython-310.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-310.pyc,, +anyio/abc/__pycache__/_tasks.cpython-310.pyc,, +anyio/abc/__pycache__/_testing.cpython-310.pyc,, +anyio/abc/_resources.py,sha256=js737mWPG6IW0fH8W4Tz9eNWLztse7dKxEC61z934Vk,752 +anyio/abc/_sockets.py,sha256=i1VdcJTLAuRlYeZoL6s5RBSWbX62Cu6ln5YZBL2YrWk,5754 +anyio/abc/_streams.py,sha256=0g70fhKAzbnK0KKmWwRgwmKdApBwduAcVj4TpjSzjzU,6501 +anyio/abc/_subprocesses.py,sha256=iREP_YQ91it88lDU4XIcI3HZ9HUvV5UmjQk_sSPonrw,2071 +anyio/abc/_tasks.py,sha256=mQQd1DANqpySKyehVVPdMfi_UEG49zZUJpt5blunOjg,3119 +anyio/abc/_testing.py,sha256=ifKCUPzcQdHAEGO-weu2GQvzjMQPPIWO24mQ0z6zkdU,1928 +anyio/from_thread.py,sha256=nSq6mafYMqwxKmzdJyISg8cp-AyBj9rxZPMt_b7klSM,16497 +anyio/lowlevel.py,sha256=W4ydshns7f86YuSESFc2igTf46AWMXnGPQGsY_Esl2E,4679 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=kWj2B8BJehePJd1sztRBmJBRh8O4hk1oGSYQRlX5Gr8,5134 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-310.pyc,, +anyio/streams/__pycache__/buffered.cpython-310.pyc,, +anyio/streams/__pycache__/file.cpython-310.pyc,, +anyio/streams/__pycache__/memory.cpython-310.pyc,, +anyio/streams/__pycache__/stapled.cpython-310.pyc,, +anyio/streams/__pycache__/text.cpython-310.pyc,, +anyio/streams/__pycache__/tls.cpython-310.pyc,, +anyio/streams/buffered.py,sha256=FegOSO4Xcxa5SaDfU1A3ZkTTxaPrv6G435Y_giZ8k44,4437 +anyio/streams/file.py,sha256=pujJ-m6BX-gOLnVoZwkE5kh-YDs5Vx9eJFVkvliQ0S4,4353 +anyio/streams/memory.py,sha256=3RGeZoevoGIgBWfD2_X1cqxIPOz-BqQkRf6lUcOnBYc,9209 +anyio/streams/stapled.py,sha256=0E0V15v8M5GVelpHe5RT0S33tQ9hGe4ZCXo_KJEjtt4,4258 
+anyio/streams/text.py,sha256=WRFyjsRpBjQKdCmR4ZuzYTEAJqGx2s5oTJmGI1C6Ng0,5014 +anyio/streams/tls.py,sha256=-WXGsMV14XHXAxc38WpBvGusjuY7e449g4UCEHIlnWw,12040 +anyio/to_process.py,sha256=hu0ES3HJC-VEjcdPJMzAzjyTaekaCNToO3coj3jvnus,9247 +anyio/to_thread.py,sha256=VeMQoo8Va2zz0WFk2p123QikDpqk2wYZGw20COC3wqw,2124 diff --git a/sbsheriff/Lib/site-packages/disnake-2.5.2.dist-info/WHEEL b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/WHEEL similarity index 100% rename from sbsheriff/Lib/site-packages/disnake-2.5.2.dist-info/WHEEL rename to sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/WHEEL diff --git a/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/entry_points.txt b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/entry_points.txt new file mode 100644 index 0000000..44dd9bd --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/top_level.txt b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio-3.6.2.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/sbsheriff/Lib/site-packages/anyio/__init__.py b/sbsheriff/Lib/site-packages/anyio/__init__.py new file mode 100644 index 0000000..6e81178 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/__init__.py @@ -0,0 +1,167 @@ +__all__ = ( + "maybe_async", + "maybe_async_cm", + "run", + "sleep", + "sleep_forever", + "sleep_until", + "current_time", + "get_all_backends", + "get_cancelled_exc_class", + "BrokenResourceError", + "BrokenWorkerProcess", + "BusyResourceError", + "ClosedResourceError", + "DelimiterNotFound", + "EndOfStream", + "ExceptionGroup", + "IncompleteRead", + "TypedAttributeLookupError", + "WouldBlock", + "AsyncFile", + "Path", + "open_file", + "wrap_file", + "aclose_forcefully", + "open_signal_receiver", + "connect_tcp", + "connect_unix", + "create_tcp_listener", + "create_unix_listener", + "create_udp_socket", + "create_connected_udp_socket", + "getaddrinfo", + "getnameinfo", + "wait_socket_readable", + "wait_socket_writable", + "create_memory_object_stream", + "run_process", + "open_process", + "create_lock", + "CapacityLimiter", + "CapacityLimiterStatistics", + "Condition", + "ConditionStatistics", + "Event", + "EventStatistics", + "Lock", + "LockStatistics", + "Semaphore", + "SemaphoreStatistics", + "create_condition", + "create_event", + "create_semaphore", + "create_capacity_limiter", + "open_cancel_scope", + "fail_after", + "move_on_after", + "current_effective_deadline", + "TASK_STATUS_IGNORED", + "CancelScope", + "create_task_group", + "TaskInfo", + "get_current_task", + "get_running_tasks", + "wait_all_tasks_blocked", + "run_sync_in_worker_thread", + "run_async_from_thread", + "run_sync_from_thread", + "current_default_worker_thread_limiter", + "create_blocking_portal", + "start_blocking_portal", + "typed_attribute", + "TypedAttributeSet", + "TypedAttributeProvider", +) + +from typing import Any + +from ._core._compat import maybe_async, maybe_async_cm +from ._core._eventloop import ( + current_time, + get_all_backends, + get_cancelled_exc_class, + run, + sleep, + sleep_forever, + sleep_until, +) +from ._core._exceptions import ( + BrokenResourceError, + BrokenWorkerProcess, + BusyResourceError, + ClosedResourceError, + DelimiterNotFound, + EndOfStream, + ExceptionGroup, + IncompleteRead, + TypedAttributeLookupError, + WouldBlock, +) +from 
._core._fileio import AsyncFile, Path, open_file, wrap_file +from ._core._resources import aclose_forcefully +from ._core._signals import open_signal_receiver +from ._core._sockets import ( + connect_tcp, + connect_unix, + create_connected_udp_socket, + create_tcp_listener, + create_udp_socket, + create_unix_listener, + getaddrinfo, + getnameinfo, + wait_socket_readable, + wait_socket_writable, +) +from ._core._streams import create_memory_object_stream +from ._core._subprocesses import open_process, run_process +from ._core._synchronization import ( + CapacityLimiter, + CapacityLimiterStatistics, + Condition, + ConditionStatistics, + Event, + EventStatistics, + Lock, + LockStatistics, + Semaphore, + SemaphoreStatistics, + create_capacity_limiter, + create_condition, + create_event, + create_lock, + create_semaphore, +) +from ._core._tasks import ( + TASK_STATUS_IGNORED, + CancelScope, + create_task_group, + current_effective_deadline, + fail_after, + move_on_after, + open_cancel_scope, +) +from ._core._testing import ( + TaskInfo, + get_current_task, + get_running_tasks, + wait_all_tasks_blocked, +) +from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute + +# Re-exported here, for backwards compatibility +# isort: off +from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread +from .from_thread import ( + create_blocking_portal, + run_async_from_thread, + run_sync_from_thread, + start_blocking_portal, +) + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio."): + value.__module__ = __name__ diff --git a/sbsheriff/Lib/site-packages/anyio/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..d846850 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc new file mode 100644 index 0000000..b470e22 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc new file mode 100644 index 0000000..3ac329a Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc new file mode 100644 index 0000000..06cd952 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/__pycache__/to_process.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/__pycache__/to_process.cpython-310.pyc new file mode 100644 index 0000000..faabd55 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/__pycache__/to_process.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc new file mode 100644 index 0000000..ba4be89 Binary files /dev/null and 
b/sbsheriff/Lib/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_backends/__init__.py b/sbsheriff/Lib/site-packages/anyio/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..fdd80bd Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc new file mode 100644 index 0000000..4bd34f8 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc new file mode 100644 index 0000000..50160cd Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_backends/_asyncio.py b/sbsheriff/Lib/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 0000000..d2bbc94 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,2181 @@ +import array +import asyncio +import concurrent.futures +import math +import socket +import sys +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from concurrent.futures import Future +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + GEN_RUNNING, + GEN_SUSPENDED, + getcoroutinestate, + getgeneratorstate, +) +from io import IOBase +from os import PathLike +from queue import Queue +from socket import AddressFamily, SocketKind +from threading import Thread +from types import TracebackType +from typing import ( + IO, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + Coroutine, + Deque, + Dict, + Generator, + Iterable, + List, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from weakref import WeakKeyDictionary + +import sniffio + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._exceptions import WouldBlock +from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType +from ..lowlevel import RunVar + +if sys.version_info >= (3, 8): + + def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]: + return task.get_coro() + +else: + + def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]: + return task._coro + + +if sys.version_info >= (3, 7): + from asyncio import all_tasks, create_task, current_task, get_running_loop + from asyncio import run as native_run + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return [cb for cb, context in task._callbacks] # type: ignore[attr-defined] + +else: + _T = TypeVar("_T") + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return task._callbacks + + def native_run(main, *, debug=False): + # Snatched from Python 3.7 + from asyncio import coroutines, events, tasks + + def _cancel_all_tasks(loop): + to_cancel = all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete( + tasks.gather(*to_cancel, loop=loop, return_exceptions=True) + ) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + if events._get_running_loop() is not None: + raise RuntimeError( + "asyncio.run() cannot be called from a running event loop" + ) + + if not coroutines.iscoroutine(main): + raise ValueError(f"a coroutine was expected, got {main!r}") + + loop = events.new_event_loop() + try: + events.set_event_loop(loop) + loop.set_debug(debug) + return loop.run_until_complete(main) + finally: + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + events.set_event_loop(None) + loop.close() + + def create_task( + coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, name: object = None + ) -> asyncio.Task: + return get_running_loop().create_task(coro) + + def get_running_loop() -> asyncio.AbstractEventLoop: + loop = asyncio._get_running_loop() + if loop is not None: + return loop + else: + raise RuntimeError("no running event loop") + + def all_tasks( + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> Set[asyncio.Task]: + """Return a set of all tasks for the loop.""" + from asyncio import Task + + if loop is None: + loop = get_running_loop() + + return {t for t in Task.all_tasks(loop) if not t.done()} + + def current_task( + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> Optional[asyncio.Task]: + if loop is None: + loop = get_running_loop() + + return asyncio.Task.current_task(loop) + + +T_Retval = TypeVar("T_Retval") + +# Check whether there is native support for task 
names in asyncio (3.8+) +_native_task_names = hasattr(asyncio.Task, "get_name") + + +_root_task: RunVar[Optional[asyncio.Task]] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + for cb in _get_task_callbacks(task): + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars = ( + WeakKeyDictionary() +) # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] + +current_token = get_running_loop + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + coro = cast(Coroutine[Any, Any, Any], get_coro(task)) + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + try: + return getgeneratorstate(cast(Generator, coro)) in ( + GEN_RUNNING, + GEN_SUSPENDED, + ) + except AttributeError: + # task coro is async_generator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") + + +def _maybe_set_event_loop_policy( + policy: Optional[asyncio.AbstractEventLoopPolicy], use_uvloop: bool +) -> None: + # On CPython, use uvloop when possible if no other policy has been given and if not + # explicitly disabled + if policy is None and use_uvloop and sys.implementation.name == "cpython": + try: + import uvloop + except ImportError: + pass + else: + # Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier) + if not hasattr( + asyncio.AbstractEventLoop, "shutdown_default_executor" + ) or hasattr(uvloop.loop.Loop, "shutdown_default_executor"): + policy = uvloop.EventLoopPolicy() + + if policy is not None: + asyncio.set_event_loop_policy(policy) + + +def run( + func: Callable[..., Awaitable[T_Retval]], + *args: object, + debug: bool = False, + use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None, +) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task_state = TaskState(None, get_callable_name(func), None) + _task_states[task] = task_state + if _native_task_names: + task.set_name(task_state.name) + + try: + return await func(*args) + finally: + del _task_states[task] + + _maybe_set_event_loop_policy(policy, use_uvloop) + return native_run(wrapper(), debug=debug) + + +# +# Miscellaneous +# + +sleep = asyncio.sleep + + +# +# Timeouts and cancellation +# + +CancelledError = asyncio.CancelledError + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> "CancelScope": + return object.__new__(cls) + + def
__init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: Optional[CancelScope] = None + self._cancel_called = False + self._active = False + self._timeout_handle: Optional[asyncio.TimerHandle] = None + self._cancel_handle: Optional[asyncio.Handle] = None + self._tasks: Set[asyncio.Task] = set() + self._host_task: Optional[asyncio.Task] = None + self._timeout_expired = False + + def __enter__(self) -> "CancelScope": + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_name = host_task.get_name() if _native_task_names else None + task_state = TaskState(None, task_name, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + + self._timeout() + self._active = True + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current task's " + "current cancel scope" + ) + + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the farthest directly cancelled parent scope if this + # one was shielded + if self._shield: + self._deliver_cancellation_to_parent() + + if exc_val is not None: + exceptions = ( + exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val] + ) + if all(isinstance(exc, CancelledError) for exc in exceptions): + if self._timeout_expired: + return True + elif not self._cancel_called: + # Task was cancelled natively + return None + elif not self._parent_cancelled(): + # This scope was directly cancelled + return True + + return None + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self._timeout_expired = True + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self) -> None: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for cancellation.
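+ + Cancellation has to be delivered repeatedly because a task only acts on a pending CancelledError when it next resumes; call_soon() reschedules this method until no eligible tasks remain.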
+ """ + should_retry = False + current = current_task() + for task in self._tasks: + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started and is not in a cancel + # scope shielded from this one + cancel_scope = _task_states[task].cancel_scope + while cancel_scope is not self: + if cancel_scope is None or cancel_scope._shield: + break + else: + cancel_scope = cancel_scope._parent_scope + else: + should_retry = True + if task is not current and ( + task is self._host_task or _task_started(task) + ): + task.cancel() + + # Schedule another callback if there are still tasks left + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation + ) + else: + self._cancel_handle = None + + def _deliver_cancellation_to_parent(self) -> None: + """Start cancellation effort in the farthest directly cancelled parent scope""" + scope = self._parent_scope + scope_to_cancel: Optional[CancelScope] = None + while scope is not None: + if scope._cancel_called and scope._cancel_handle is None: + scope_to_cancel = scope + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + if scope_to_cancel is not None: + scope_to_cancel._deliver_cancellation() + + def _parent_cancelled(self) -> bool: + # Check whether any parent has been cancelled + cancel_scope = self._parent_scope + while cancel_scope is not None and not cancel_scope._shield: + if cancel_scope._cancel_called: + return True + else: + cancel_scope = cancel_scope._parent_scope + + return False + + def cancel(self) -> DeprecatedAwaitable: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + self._deliver_cancellation() + + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._deliver_cancellation_to_parent() + + +async def checkpoint() -> None: + await sleep(0) + + +async def checkpoint_if_cancelled() -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + +async def cancel_shielded_checkpoint() -> None: + with CancelScope(shield=True): + await sleep(0) + + +def current_effective_deadline() -> float: + try: + cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index] + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + +def current_time() -> float: + return get_running_loop().time() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates
auxiliary task information that cannot be added to the Task instance itself + because there are no guarantees about its implementation. + """ + + __slots__ = "parent_id", "name", "cancel_scope" + + def __init__( + self, + parent_id: Optional[int], + name: Optional[str], + cancel_scope: Optional[CancelScope], + ): + self.parent_id = parent_id + self.name = name + self.cancel_scope = cancel_scope + + +_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState] + + +# +# Task groups +# + + +class ExceptionGroup(BaseExceptionGroup): + def __init__(self, exceptions: List[BaseException]): + super().__init__() + self.exceptions = exceptions + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: object = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: List[BaseException] = [] + + async def __aenter__(self) -> "TaskGroup": + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if exc_val is not None: + self.cancel_scope.cancel() + self._exceptions.append(exc_val) + + while self.cancel_scope._tasks: + try: + await asyncio.wait(self.cancel_scope._tasks) + except asyncio.CancelledError: + self.cancel_scope.cancel() + + self._active = False + if not self.cancel_scope._parent_cancelled(): + exceptions = self._filter_cancellation_errors(self._exceptions) + else: + exceptions = self._exceptions + + try: + if len(exceptions) > 1: + if all( + isinstance(e, CancelledError) and not e.args for e in exceptions + ): + # Tasks were cancelled natively, without a cancellation message + raise CancelledError + else: + raise ExceptionGroup(exceptions) + elif exceptions and exceptions[0] is not exc_val: + raise exceptions[0] + except BaseException as exc: + # Clear the context here, as it can only be done in-flight. + # If the context is not cleared, it can result in recursive tracebacks (see #145). 
+ exc.__context__ = None + raise + + return ignore_exception + + @staticmethod + def _filter_cancellation_errors( + exceptions: Sequence[BaseException], + ) -> List[BaseException]: + filtered_exceptions: List[BaseException] = [] + for exc in exceptions: + if isinstance(exc, ExceptionGroup): + new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions) + if len(new_exceptions) > 1: + filtered_exceptions.append(exc) + elif len(new_exceptions) == 1: + filtered_exceptions.append(new_exceptions[0]) + elif new_exceptions: + new_exc = ExceptionGroup(new_exceptions) + new_exc.__cause__ = exc.__cause__ + new_exc.__context__ = exc.__context__ + new_exc.__traceback__ = exc.__traceback__ + filtered_exceptions.append(new_exc) + elif not isinstance(exc, CancelledError) or exc.args: + filtered_exceptions.append(exc) + + return filtered_exceptions + + async def _run_wrapped_task( + self, coro: Coroutine, task_status_future: Optional[asyncio.Future] + ) -> None: + # This is the code path for Python 3.6 and 3.7 on which asyncio freaks out if a task raises + # a BaseException. + __traceback_hide__ = __tracebackhide__ = True # noqa: F841 + task = cast(asyncio.Task, current_task()) + try: + await coro + except BaseException as exc: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + else: + if task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + finally: + if task in self.cancel_scope._tasks: + self.cancel_scope._tasks.remove(task) + del _task_states[task] + + def _spawn( + self, + func: Callable[..., Coroutine], + args: tuple, + name: object, + task_status_future: Optional[asyncio.Future] = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + # This is the code path for Python 3.8+ + assert _task in self.cancel_scope._tasks + self.cancel_scope._tasks.remove(_task) + del _task_states[_task] + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started."
+ ) + + options = {} + name = get_callable_name(func) if name is None else str(name) + if _native_task_names: + options["name"] = name + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not asyncio.iscoroutine(coro): + raise TypeError( + f"Expected an async function, but {func} appears to be synchronous" + ) + + foreign_coro = not hasattr(coro, "cr_frame") and not hasattr(coro, "gi_frame") + if foreign_coro or sys.version_info < (3, 8): + coro = self._run_wrapped_task(coro, task_status_future) + + task = create_task(coro, **options) + if not foreign_coro and sys.version_info >= (3, 8): + task.add_done_callback(task_done) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, name=name, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + return task + + def start_soon( + self, func: Callable[..., Coroutine], *args: object, name: object = None + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Coroutine], *args: object, name: object = None + ) -> None: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch point + # between, the task group is cancelled and this method never proceeds to process the + # completed future. That's why we have to have a shielded cancel scope here. + with CancelScope(shield=True): + try: + return await future + except CancelledError: + task.cancel() + raise + + +# +# Threads +# + +_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: Set["WorkerThread"], + idle_workers: Deque["WorkerThread"], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + Union[Tuple[Context, Callable, tuple, asyncio.Future], None] + ] = Queue(2) + self.idle_since = current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: Optional[BaseException] + ) -> None: + self.idle_since = current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread("asyncio"): + threadlocals.loop = self.loop + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future = item + if not future.cancelled(): + result = None + exception: Optional[BaseException] = None + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + self.queue.task_done() + + def stop(self, f: Optional[asyncio.Task] = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[Deque[WorkerThread]] = RunVar(
+ "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[Set[WorkerThread]] = RunVar("_threadpool_workers") + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional["CapacityLimiter"] = None, +) -> T_Retval: + await checkpoint() + + # If this is the first run in this event loop thread, set up the necessary variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with (limiter or current_default_thread_limiter()): + with CancelScope(shield=not cancellable): + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer + now = current_time() + while idle_workers: + if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME: + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback(expired_worker.stop) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + worker.queue.put_nowait((context, func, args, future)) + return await future + + +def run_sync_from_thread( + func: Callable[..., T_Retval], + *args: object, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = loop or threadlocals.loop + if sys.version_info < (3, 7): + loop.call_soon_threadsafe(copy_context().run, wrapper) + else: + loop.call_soon_threadsafe(wrapper) + + return f.result() + + +def run_async_from_thread( + func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object +) -> T_Retval: + f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe( + func(*args), threadlocals.loop + ) + return f.result() + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> "BlockingPortal": + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread( + self, + func: Callable, + args: tuple, + kwargs: Dict[str, Any], + name: object, + future: Future, + ) -> None: + run_sync_from_thread( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + loop=self._loop, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.feed_eof() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: 
asyncio.subprocess.Process + _stdin: Optional[StreamWriterWrapper] + _stdout: Optional[StreamReaderWrapper] + _stderr: Optional[StreamReaderWrapper] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + shell: bool, + stdin: Union[int, IO[Any], None], + stdout: Union[int, IO[Any], None], + stderr: Union[int, IO[Any], None], + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> Process: + await checkpoint() + if shell: + process = await asyncio.create_subprocess_shell( + cast(Union[str, bytes], command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +def _forcibly_shutdown_process_pool_on_exit( + workers: Set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: Optional[asyncio.AbstractChildWatcher] + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + child_watcher = None + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: Set[Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or anyio.run(). 
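+ + The task sleeps until the event loop cancels it during shutdown; the handler below then kills any workers that are still running and awaits their closure.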
+ + """ + process: Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + kwargs = {"name": "AnyIO process pool shutdown task"} if _native_task_names else {} + create_task(_shutdown_process_pool_on_exit(workers), **kwargs) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) + ) + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: Deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Optional[Exception]) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + self.read_queue.append(data) + self.read_event.set() + + def eof_received(self) -> Optional[bool]: + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: Deque[Tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + await checkpoint() + + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + 
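# Put the leftover back at the front of the queue so the next receive() returns it first +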
self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will block until + # data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class UNIXSocketStream(abc.SocketStream): + _receive_future: Optional[asyncio.Future] = None + _send_future: Optional[asyncio.Future] = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + self._loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await checkpoint() + with self._receive_guard: + while True: + try: + data = self.__raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self.__raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await checkpoint() + with self._receive_guard: +
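# Keep retrying recvmsg() until actual data or ancillary payload arrives; BlockingIOError only means the socket is not readable yet +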
while True: + try: + message, ancdata, flags, addr = self.__raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds( + self, message: bytes, fds: Collection[Union[int, IOBase]] + ) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self.__raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: Optional[CancelScope] = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, client_sock + ) + return SocketStream(cast(asyncio.Transport, transport), protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class 
UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + 
elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +async def connect_tcp( + host: str, port: int, local_addr: Optional[Tuple[str, int]] = None +) -> SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_addr + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + +async def connect_unix(path: str) -> UNIXSocketStream: + await checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool, +) -> Union[UDPSocket, ConnectedUDPSocket]: + result = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + transport = cast(asyncio.DatagramTransport, result[0]) + protocol = result[1] + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + +async def getaddrinfo( + host: Union[bytes, str], + port: Union[str, int, None], + *, + family: Union[int, AddressFamily] = 0, + type: Union[int, SocketKind] = 0, + proto: int = 0, + flags: int = 0, +) -> GetAddrInfoReturnType: + # https://github.com/python/typeshed/pull/4304 + result = await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + return cast(GetAddrInfoReturnType, result) + + +async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + +_read_events: RunVar[Dict[Any, asyncio.Event]] = RunVar("read_events") +_write_events: RunVar[Dict[Any, asyncio.Event]] = RunVar("write_events") + + +async def wait_socket_readable(sock: socket.socket) -> None: + await checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError("reading from") from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + +async def wait_socket_writable(sock: socket.socket) -> None: + await checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): + raise BusyResourceError("writing to") from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + loop.remove_writer(sock) + writable = 
True + else: + writable = False + + if not writable: + raise ClosedResourceError + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> "Event": + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> DeprecatedAwaitable: + self._event.set() + return DeprecatedAwaitable(self.set) + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if await self._event.wait(): + await checkpoint() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: Set[Any] = set() + self._wait_queue: Dict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + if value < 1: + raise ValueError("total_tokens must be >= 1") + + old_value = self._total_tokens + self._total_tokens = value + events = [] + for event in self._wait_queue.values(): + if value <= old_value: + break + + if not event.is_set(): + events.append(event) + old_value += 1 + + for event in events: + event.set() + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.acquire_on_behalf_of_nowait(current_task()) + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's " + "tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's " "tokens" + ) from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) 
< self._total_tokens: + event = self._wait_queue.popitem()[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + +# +# Operating system signals +# + + +class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): + def __init__(self, signals: Tuple[int, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: Deque[int] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: Set[int] = set() + + def _deliver(self, signum: int) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> "_SignalReceiver": + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + return None + + def __aiter__(self) -> "_SignalReceiver": + return self + + async def __anext__(self) -> int: + await checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +def open_signal_receiver(*signals: int) -> _SignalReceiver: + return _SignalReceiver(signals) + + +# +# Testing and debugging +# + + +def _create_task_info(task: asyncio.Task) -> TaskInfo: + task_state = _task_states.get(task) + if task_state is None: + name = task.get_name() if _native_task_names else None + parent_id = None + else: + name = task_state.name + parent_id = task_state.parent_id + + return TaskInfo(id(task), parent_id, name, get_coro(task)) + + +def get_current_task() -> TaskInfo: + return _create_task_info(current_task()) # type: ignore[arg-type] + + +def get_running_tasks() -> List[TaskInfo]: + return [_create_task_info(task) for task in all_tasks() if not task.done()] + + +async def wait_all_tasks_blocked() -> None: + await checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined] + await sleep(0.1) + break + else: + return + + +class TestRunner(abc.TestRunner): + def __init__( + self, + debug: bool = False, + use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None, + ): + self._exceptions: List[BaseException] = [] + _maybe_set_event_loop_policy(policy, use_uvloop) + self._loop = asyncio.new_event_loop() + self._loop.set_debug(debug) + self._loop.set_exception_handler(self._exception_handler) + asyncio.set_event_loop(self._loop) + + def _cancel_all_tasks(self) -> None: + to_cancel = all_tasks(self._loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + self._loop.run_until_complete( + asyncio.gather(*to_cancel, return_exceptions=True) + ) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + raise 
cast(BaseException, task.exception()) + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: Dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise ExceptionGroup(exceptions) + + def close(self) -> None: + try: + self._cancel_all_tasks() + self._loop.run_until_complete(self._loop.shutdown_asyncgens()) + finally: + asyncio.set_event_loop(None) + self._loop.close() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: Dict[str, Any], + ) -> Iterable[T_Retval]: + async def fixture_runner() -> None: + agen = fixture_func(**kwargs) + try: + retval = await agen.asend(None) + self._raise_async_exceptions() + except BaseException as exc: + f.set_exception(exc) + return + else: + f.set_result(retval) + + await event.wait() + try: + await agen.asend(None) + except StopAsyncIteration: + pass + else: + await agen.aclose() + raise RuntimeError("Async generator fixture did not stop") + + f = self._loop.create_future() + event = asyncio.Event() + fixture_task = self._loop.create_task(fixture_runner()) + self._loop.run_until_complete(f) + yield f.result() + event.set() + self._loop.run_until_complete(fixture_task) + self._raise_async_exceptions() + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: Dict[str, Any], + ) -> T_Retval: + retval = self._loop.run_until_complete(fixture_func(**kwargs)) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any] + ) -> None: + try: + self._loop.run_until_complete(test_func(**kwargs)) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() diff --git a/sbsheriff/Lib/site-packages/anyio/_backends/_trio.py b/sbsheriff/Lib/site-packages/anyio/_backends/_trio.py new file mode 100644 index 0000000..cf2aaec --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,988 @@ +import array +import math +import socket +from concurrent.futures import Future +from contextvars import copy_context +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from types import TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + ContextManager, + Coroutine, + Deque, + Dict, + Generic, + Iterable, + List, + Mapping, + NoReturn, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +import sniffio +import trio.from_thread +from outcome import Error, Outcome, Value +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, T +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._sockets import convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType + +if TYPE_CHECKING: + from trio_typing import TaskStatus + +try: + from trio import lowlevel as trio_lowlevel +except ImportError: + from trio import hazmat as trio_lowlevel # type: ignore[no-redef] + from trio.hazmat import wait_readable, wait_writable +else: + from trio.lowlevel import wait_readable, wait_writable + +try: + trio_open_process = trio_lowlevel.open_process # type: ignore[attr-defined] +except AttributeError: + from trio import open_process as trio_open_process + +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) + + +# +# Event loop +# + +run = trio.run +current_token = trio.lowlevel.current_trio_token +RunVar = trio.lowlevel.RunVar + + +# +# Miscellaneous +# + +sleep = trio.sleep + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: Optional[trio.CancelScope] = None, **kwargs: object + ) -> "CancelScope": + return object.__new__(cls) + + def __init__( + self, original: Optional[trio.CancelScope] = None, **kwargs: Any + ) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> "CancelScope": + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> DeprecatedAwaitable: + self.__original.cancel() + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +CancelledError = trio.Cancelled +checkpoint = trio.lowlevel.checkpoint +checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled +cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint +current_effective_deadline = trio.current_effective_deadline +current_time = trio.current_time + + +# +# Task groups +# + + +class ExceptionGroup(BaseExceptionGroup, trio.MultiError): + pass + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery() + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> "TaskGroup": + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: 
Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + try: + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) + except trio.MultiError as exc: + raise ExceptionGroup(exc.exceptions) from None + finally: + self._active = False + + def start_soon(self, func: Callable, *args: object, name: object = None) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Coroutine], *args: object, name: object = None + ) -> object: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Threads +# + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional[trio.CapacityLimiter] = None, +) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread("trio"): + return func(*args) + + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + return await run_sync( + context.run, wrapper, cancellable=cancellable, limiter=limiter + ) + + +# TODO: remove this workaround when trio 0.20 is the minimum requirement +def run_async_from_thread( + fn: Callable[..., Awaitable[T_Retval]], *args: Any +) -> T_Retval: + async def wrapper() -> T_Retval: + retval: T_Retval + + async def inner() -> None: + nonlocal retval + __tracebackhide__ = True + retval = await fn(*args) + + async with trio.open_nursery() as n: + context.run(n.start_soon, inner) + + __tracebackhide__ = True + return retval + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "trio") + return trio.from_thread.run(wrapper) + + +def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval: + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + retval = trio.from_thread.run_sync(copy_context().run, fn, *args) + return cast(T_Retval, retval) + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> "BlockingPortal": + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread( + self, + func: Callable, + args: tuple, + kwargs: Dict[str, Any], + name: object, + future: Future, + ) -> None: + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "trio") + trio.from_thread.run_sync( + context.run, + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + trio_token=self._token, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: Optional[int] = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + 
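# Send-side counterpart of ReceiveStreamWrapper: adapts a trio send stream to the anyio ByteSendStream interface, translating trio's ClosedResourceError and BrokenResourceError into their anyio equivalents. + 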
_stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: Optional[abc.ByteSendStream] + _stdout: Optional[abc.ByteReceiveStream] + _stderr: Optional[abc.ByteReceiveStream] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + shell: bool, + stdin: Union[int, IO[Any], None], + stdout: Union[int, IO[Any], None], + stderr: Union[int, IO[Any], None], + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> Process: + process = await trio_open_process( + command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=shell, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: Set[Process]) -> None: + process: Process + try: + await sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + 
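# Mark the wrapper closed before the socket is actually closed, so that _convert_socket_error() reports a deliberate close (ClosedResourceError) rather than BrokenResourceError to any operation racing with aclose(). + 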
self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> "NoReturn": + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds( + self, message: bytes, fds: Collection[Union[int, IOBase]] + ) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, # type: ignore[list-item] + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + 
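# Disable Nagle's algorithm on each accepted socket, matching the outbound sockets created by connect_tcp() below, so small sends are not delayed by batching. + 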
return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +async def connect_tcp( + host: str, port: int, local_address: Optional[IPSockAddrType] = None +) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + +async def connect_unix(path: str) -> UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool, +) -> Union[UDPSocket, ConnectedUDPSocket]: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + +getaddrinfo = trio.socket.getaddrinfo +getnameinfo = trio.socket.getnameinfo + + +async def wait_socket_readable(sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + 
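# Another task is already waiting on this socket; re-raise as anyio's BusyResourceError without chaining the trio traceback. + 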
raise BusyResourceError("reading from") from None + + +async def wait_socket_writable(sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> "Event": + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> DeprecatedAwaitable: + self.__original.set() + return DeprecatedAwaitable(self.set) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__(cls, *args: object, **kwargs: object) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__( + self, *args: Any, original: Optional[trio.CapacityLimiter] = None + ) -> None: + self.__original = original or trio.CapacityLimiter(*args) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.__original.acquire_nowait() + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + self.__original.acquire_on_behalf_of_nowait(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=orig.borrowers, + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: RunVar = RunVar("_capacity_limiter_wrapper") + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + +# +# Signal handling +# + + +class _SignalReceiver(DeprecatedAsyncContextManager[T]): + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + def __enter__(self) -> T: + return self._cm.__enter__() + + def __exit__( + self, + exc_type: 
Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def open_signal_receiver(*signals: Signals) -> _SignalReceiver: + cm = trio.open_signal_receiver(*signals) + return _SignalReceiver(cm) + + +# +# Testing and debugging +# + + +def get_current_task() -> TaskInfo: + task = trio_lowlevel.current_task() + + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + return TaskInfo(id(task), parent_id, task.name, task.coro) + + +def get_running_tasks() -> List[TaskInfo]: + root_task = trio_lowlevel.current_root_task() + task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: List[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append( + TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro) + ) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + +def wait_all_tasks_blocked() -> Awaitable[None]: + import trio.testing + + return trio.testing.wait_all_tasks_blocked() + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from collections import deque + from queue import Queue + + self._call_queue: "Queue[Callable[..., object]]" = Queue() + self._result_queue: Deque[Outcome] = deque() + self._stop_event: Optional[trio.Event] = None + self._nursery: Optional[trio.Nursery] = None + self._options = options + + async def _trio_main(self) -> None: + self._stop_event = trio.Event() + async with trio.open_nursery() as self._nursery: + await self._stop_event.wait() + + async def _call_func( + self, func: Callable[..., Awaitable[object]], args: tuple, kwargs: dict + ) -> None: + try: + retval = await func(*args, **kwargs) + except BaseException as exc: + self._result_queue.append(Error(exc)) + else: + self._result_queue.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._nursery = None + + def _get_nursery(self) -> trio.Nursery: + if self._nursery is None: + trio.lowlevel.start_guest_run( + self._trio_main, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._nursery is None: + self._call_queue.get()() + + return self._nursery + + def _call( + self, func: Callable[..., Awaitable[T_Retval]], *args: object, **kwargs: object + ) -> T_Retval: + self._get_nursery().start_soon(self._call_func, func, args, kwargs) + while not self._result_queue: + self._call_queue.get()() + + outcome = self._result_queue.pop() + return outcome.unwrap() + + def close(self) -> None: + if self._stop_event: + self._stop_event.set() + while self._nursery is not None: + self._call_queue.get()() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: Dict[str, Any], + ) -> Iterable[T_Retval]: + async def fixture_runner(*, task_status: "TaskStatus") -> None: + agen = fixture_func(**kwargs) + retval = await agen.asend(None) + task_status.started(retval) + await teardown_event.wait() + try: + await agen.asend(None) + except StopAsyncIteration: + pass + else: + await agen.aclose() + raise RuntimeError("Async generator fixture did not stop") + + teardown_event = trio.Event() + fixture_value = self._call(lambda: 
self._get_nursery().start(fixture_runner)) + yield fixture_value + teardown_event.set() + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: Dict[str, Any], + ) -> T_Retval: + return self._call(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any] + ) -> None: + self._call(test_func, **kwargs) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__init__.py b/sbsheriff/Lib/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..8fe0275 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_compat.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000..813a9fd Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_compat.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc new file mode 100644 index 0000000..6f110c2 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc new file mode 100644 index 0000000..58ddbb1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc new file mode 100644 index 0000000..e0e5c9a Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc new file mode 100644 index 0000000..ef45019 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc new file mode 100644 index 0000000..9a0ba34 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc new file mode 100644 index 0000000..235c1db Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc new file mode 100644 index 0000000..7494ab7 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc differ diff --git 
a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc new file mode 100644 index 0000000..dab3cfc Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc new file mode 100644 index 0000000..ec18f6b Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc new file mode 100644 index 0000000..60591a7 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc new file mode 100644 index 0000000..33c0d4c Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc new file mode 100644 index 0000000..25a9e76 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_compat.py b/sbsheriff/Lib/site-packages/anyio/_core/_compat.py new file mode 100644 index 0000000..7062be5 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_compat.py @@ -0,0 +1,218 @@ +from abc import ABCMeta, abstractmethod +from contextlib import AbstractContextManager +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AsyncContextManager, + Callable, + ContextManager, + Generator, + Generic, + Iterable, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) +from warnings import warn + +if TYPE_CHECKING: + from ._testing import TaskInfo +else: + TaskInfo = object + +T = TypeVar("T") +AnyDeprecatedAwaitable = Union[ + "DeprecatedAwaitable", + "DeprecatedAwaitableFloat", + "DeprecatedAwaitableList[T]", + TaskInfo, +] + + +@overload +async def maybe_async(__obj: TaskInfo) -> TaskInfo: + ... + + +@overload +async def maybe_async(__obj: "DeprecatedAwaitableFloat") -> float: + ... + + +@overload +async def maybe_async(__obj: "DeprecatedAwaitableList[T]") -> List[T]: + ... + + +@overload +async def maybe_async(__obj: "DeprecatedAwaitable") -> None: + ... + + +async def maybe_async( + __obj: "AnyDeprecatedAwaitable[T]", +) -> Union[TaskInfo, float, List[T], None]: + """ + Await on the given object if necessary. + + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were converted from coroutine functions into regular functions. + + Do **not** try to use this for any other purpose! + + :return: the result of awaiting on the object if coroutine, or the object itself otherwise + + .. 
versionadded:: 2.2 + + """ + return __obj._unwrap() + + +class _ContextManagerWrapper: + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + async def __aenter__(self) -> T: + return self._cm.__enter__() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def maybe_async_cm( + cm: Union[ContextManager[T], AsyncContextManager[T]] +) -> AsyncContextManager[T]: + """ + Wrap a regular context manager as an async one if necessary. + + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were changed to return regular context managers instead of async ones. + + :param cm: a regular or async context manager + :return: an async context manager + + .. versionadded:: 2.2 + + """ + if not isinstance(cm, AbstractContextManager): + raise TypeError("Given object is not a context manager") + + return _ContextManagerWrapper(cm) + + +def _warn_deprecation( + awaitable: "AnyDeprecatedAwaitable[Any]", stacklevel: int = 1 +) -> None: + warn( + f'Awaiting on {awaitable._name}() is deprecated. Use "await ' + f"anyio.maybe_async({awaitable._name}(...)) if you have to support both AnyIO 2.x " + f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.', + DeprecationWarning, + stacklevel=stacklevel + 1, + ) + + +class DeprecatedAwaitable: + def __init__(self, func: Callable[..., "DeprecatedAwaitable"]): + self._name = f"{func.__module__}.{func.__qualname__}" + + def __await__(self) -> Generator[None, None, None]: + _warn_deprecation(self) + if False: + yield + + def __reduce__(self) -> Tuple[Type[None], Tuple[()]]: + return type(None), () + + def _unwrap(self) -> None: + return None + + +class DeprecatedAwaitableFloat(float): + def __new__( + cls, x: float, func: Callable[..., "DeprecatedAwaitableFloat"] + ) -> "DeprecatedAwaitableFloat": + return super().__new__(cls, x) + + def __init__(self, x: float, func: Callable[..., "DeprecatedAwaitableFloat"]): + self._name = f"{func.__module__}.{func.__qualname__}" + + def __await__(self) -> Generator[None, None, float]: + _warn_deprecation(self) + if False: + yield + + return float(self) + + def __reduce__(self) -> Tuple[Type[float], Tuple[float]]: + return float, (float(self),) + + def _unwrap(self) -> float: + return float(self) + + +class DeprecatedAwaitableList(List[T]): + def __init__( + self, + iterable: Iterable[T] = (), + *, + func: Callable[..., "DeprecatedAwaitableList[T]"], + ): + super().__init__(iterable) + self._name = f"{func.__module__}.{func.__qualname__}" + + def __await__(self) -> Generator[None, None, List[T]]: + _warn_deprecation(self) + if False: + yield + + return list(self) + + def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]: + return list, (list(self),) + + def _unwrap(self) -> List[T]: + return list(self) + + +class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta): + @abstractmethod + def __enter__(self) -> T: + pass + + @abstractmethod + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + pass + + async def __aenter__(self) -> T: + warn( + f"Using {self.__class__.__name__} as an async context manager has been deprecated. 
" + f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to ' + f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if ' + f"you are completely migrating to AnyIO 3+.", + DeprecationWarning, + ) + return self.__enter__() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self.__exit__(exc_type, exc_val, exc_tb) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_eventloop.py b/sbsheriff/Lib/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000..f027ae5 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,155 @@ +import math +import sys +import threading +from contextlib import contextmanager +from importlib import import_module +from typing import ( + Any, + Callable, + Coroutine, + Dict, + Generator, + Optional, + Tuple, + Type, + TypeVar, +) + +import sniffio + +# This must be updated when new backends are introduced +from ._compat import DeprecatedAwaitableFloat + +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +threadlocals = threading.local() + + +def run( + func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, + backend: str = "asyncio", + backend_options: Optional[Dict[str, Any]] = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently either + ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` implementation with + (documented :ref:`here `) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + asynclib = import_module(f"..._backends._{backend}", package=__name__) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return asynclib.run(func, *args, **backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_asynclib().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal monotonic clock of + the event loop) + + .. 
versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> DeprecatedAwaitableFloat: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time) + + +def get_all_backends() -> Tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> Type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_asynclib().CancelledError + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread(backend: str) -> Generator[Any, None, None]: + module = sys.modules["anyio._backends._" + backend] + threadlocals.current_async_module = module + try: + yield + finally: + del threadlocals.current_async_module + + +def get_asynclib(asynclib_name: Optional[str] = None) -> Any: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + modulename = "anyio._backends._" + asynclib_name + try: + return sys.modules[modulename] + except KeyError: + return import_module(modulename) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_exceptions.py b/sbsheriff/Lib/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000..db2bbcf --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,93 @@ +from traceback import format_exception +from typing import List + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external causes + (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise + misbehaves. + """ + + +class BusyResourceError(Exception): + """Raised when two tasks are trying to read from or write to the same resource concurrently.""" + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """Raised when trying to read from a stream that has been closed from the other end.""" + + +class ExceptionGroup(BaseException): + """ + Raised when multiple exceptions have been raised in a task group. 
+ + :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together + """ + + SEPARATOR = "----------------------------\n" + + exceptions: List[BaseException] + + def __str__(self) -> str: + tracebacks = [ + "".join(format_exception(type(exc), exc, exc.__traceback__)) + for exc in self.exceptions + ] + return ( + f"{len(self.exceptions)} exceptions were raised in the task group:\n" + f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}" + ) + + def __repr__(self) -> str: + exception_reprs = ", ".join(repr(exc) for exc in self.exceptions) + return f"<{self.__class__.__name__}: {exception_reprs}>" + + +class IncompleteRead(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not + found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_fileio.py b/sbsheriff/Lib/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000..19c1e83 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,607 @@ +import os +import pathlib +import sys +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + AsyncIterator, + Callable, + Generic, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if sys.version_info >= (3, 8): + from typing import Final +else: + from typing_extensions import Final + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the following + blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the underlying file + at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: "AsyncFile[bytes]", size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> List[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: "AsyncFile[bytes]", b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: "AsyncFile[bytes]", b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: "AsyncFile[bytes]", b: ReadableBuffer) -> int: + ... + + @overload + async def write(self: "AsyncFile[str]", b: str) -> int: + ... + + async def write(self, b: Union[ReadableBuffer, str]) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: "AsyncFile[bytes]", lines: Iterable[ReadableBuffer] + ) -> None: + ... + + @overload + async def writelines(self: "AsyncFile[str]", lines: Iterable[str]) -> None: + ... + + async def writelines( + self, lines: Union[Iterable[ReadableBuffer], Iterable[str]] + ) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: Optional[int] = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: Optional[int] = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: Union[str, "PathLike[str]", int], + mode: OpenBinaryMode, + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ..., +) -> AsyncFile[bytes]: + ... + + +@overload +async def open_file( + file: Union[str, "PathLike[str]", int], + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ..., +) -> AsyncFile[str]: + ... + + +async def open_file( + file: Union[str, "PathLike[str]", int], + mode: str = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + closefd: bool = True, + opener: Optional[Callable[[str, int], int]] = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. 
+ + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator["PathLike[str]"] + + async def __anext__(self) -> "Path": + nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True) + if nextval is None: + raise StopAsyncIteration from None + + return Path(cast("PathLike[str]", nextval)) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but + it is compatible with the :class:`os.PathLike` interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the + deprecated :meth:`~pathlib.Path.link_to` method. + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: Union[str, "PathLike[str]"]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return 
self._path.__gt__(target) + + def __ge__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: Any) -> "Path": + return Path(self._path / other) + + def __rtruediv__(self, other: Any) -> "Path": + return Path(other) / self + + @property + def parts(self) -> Tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence["Path"]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> "Path": + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> List[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> "Path": + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + def is_relative_to(self, *other: Union[str, "PathLike[str]"]) -> bool: + try: + self.relative_to(*other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> "Path": + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, cancellable=True) + + async def expanduser(self) -> "Path": + return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True)) + + def glob(self, pattern: str) -> AsyncIterator["Path"]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, cancellable=True) + + async def hardlink_to(self, target: Union[str, pathlib.Path, "Path"]) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> "Path": + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync(self._path.is_block_device, cancellable=True) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync(self._path.is_char_device, cancellable=True) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, cancellable=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, cancellable=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, cancellable=True) + + async def is_mount(self) -> bool: + return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await 
to_thread.run_sync(self._path.is_socket, cancellable=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, cancellable=True) + + def iterdir(self) -> AsyncIterator["Path"]: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: Union[str, "PathLike[str]"]) -> "Path": + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, cancellable=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + ) -> AsyncFile[bytes]: + ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + ) -> AsyncFile[str]: + ... + + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, cancellable=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: Optional[str] = None, errors: Optional[str] = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + def relative_to(self, *other: Union[str, "PathLike[str]"]) -> "Path": + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> "Path": + target = await to_thread.run_sync(os.readlink, self._path) + return Path(cast(str, target)) + + async def rename(self, target: Union[str, pathlib.PurePath, "Path"]) -> "Path": + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: Union[str, pathlib.PurePath, "Path"]) -> "Path": + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> "Path": + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, cancellable=True)) + + def rglob(self, pattern: str) -> AsyncIterator["Path"]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile( + self, other_path: Union[str, bytes, int, pathlib.Path, "Path"] + ) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, cancellable=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, cancellable=True) + + async def symlink_to( + self, + target: Union[str, pathlib.Path, "Path"], + target_is_directory: bool = 
False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + def with_name(self, name: str) -> "Path": + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> "Path": + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> "Path": + return Path(self._path.with_suffix(suffix)) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_resources.py b/sbsheriff/Lib/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000..b9414f7 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_resources.py @@ -0,0 +1,16 @@ +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_signals.py b/sbsheriff/Lib/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000..02234fd --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_signals.py @@ -0,0 +1,24 @@ +from typing import AsyncIterator + +from ._compat import DeprecatedAsyncContextManager +from ._eventloop import get_asynclib + + +def open_signal_receiver( + *signals: int, +) -> DeprecatedAsyncContextManager[AsyncIterator[int]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields signal + numbers + + .. warning:: Windows does not support signals natively so it is best to avoid relying on this + in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for the given + signals, as set via :meth:`~asyncio.loop.add_signal_handler`. 
+ + """ + return get_asynclib().open_signal_receiver(*signals) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_sockets.py b/sbsheriff/Lib/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000..ca85d30 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,587 @@ +import socket +import ssl +import sys +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from pathlib import Path +from socket import AddressFamily, SocketKind +from typing import Awaitable, List, Optional, Tuple, Union, cast, overload + +from .. import to_thread +from ..abc import ( + ConnectedUDPSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_asynclib +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +GetAddrInfoReturnType = List[ + Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]] +] +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + tls: Literal[True], + ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + tls: Literal[False], + ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: + ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: + ... + + +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = None, + tls: bool = False, + ssl_context: Optional[ssl.SSLContext] = None, + tls_standard_compatible: bool = True, + tls_hostname: Optional[str] = None, + happy_eyeballs_delay: float = 0.25, +) -> Union[SocketStream, TLSStream]: + """ + Connect to a host using the TCP protocol. 
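+
+    For example, a minimal sketch that issues one plain-HTTP request and prints the
+    first chunk of the reply (``example.org`` is a placeholder host)::
+
+        import anyio
+
+        async def main() -> None:
+            stream = await anyio.connect_tcp("example.org", 80)
+            async with stream:
+                await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.org\r\n\r\n")
+                print(await stream.receive())
+
+        anyio.run(main)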
+
+    This function implements the stateless version of the Happy Eyeballs algorithm (RFC 6555).
+    If ``remote_host`` is a host name that resolves to multiple IP addresses, each one is tried
+    until one connection attempt succeeds. If the first attempt does not connect within 250
+    milliseconds, a second attempt is started using the next address in the list, and so on.
+    On IPv6 enabled systems, an IPv6 address (if available) is tried first.
+
+    When the connection has been established, a TLS handshake will be done if either
+    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
+
+    :param remote_host: the IP address or host name to connect to
+    :param remote_port: port on the target host to connect to
+    :param local_host: the interface address or name to bind the socket to before connecting
+    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
+        :class:`~anyio.streams.tls.TLSStream` instead
+    :param ssl_context: the SSL context object to use (if omitted, a default context is created)
+    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing
+        the stream and requires that the server does this as well. Otherwise,
+        :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
+        Some protocols, such as HTTP, require this option to be ``False``.
+        See :meth:`~ssl.SSLContext.wrap_socket` for details.
+    :param tls_hostname: host name to check the server certificate against (defaults to the value
+        of ``remote_host``)
+    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt
+    :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
+    :raises OSError: if the connection attempt fails
+
+    """
+    # Placed here due to https://github.com/python/mypy/issues/7057
+    connected_stream: Optional[SocketStream] = None
+
+    async def try_connect(remote_host: str, event: Event) -> None:
+        nonlocal connected_stream
+        try:
+            stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
+        except OSError as exc:
+            oserrors.append(exc)
+            return
+        else:
+            if connected_stream is None:
+                connected_stream = stream
+                tg.cancel_scope.cancel()
+            else:
+                await stream.aclose()
+        finally:
+            event.set()
+
+    asynclib = get_asynclib()
+    local_address: Optional[IPSockAddrType] = None
+    family = socket.AF_UNSPEC
+    if local_host:
+        gai_res = await getaddrinfo(str(local_host), None)
+        family, *_, local_address = gai_res[0]
+
+    target_host = str(remote_host)
+    try:
+        addr_obj = ip_address(remote_host)
+    except ValueError:
+        # getaddrinfo() will raise an exception if name resolution fails
+        gai_res = await getaddrinfo(
+            target_host, remote_port, family=family, type=socket.SOCK_STREAM
+        )
+
+        # Organize the list so that the first address is an IPv6 address (if available) and the
+        # second one is an IPv4 address. The rest can be in whatever order.
+        v6_found = v4_found = False
+        target_addrs: List[Tuple[socket.AddressFamily, str]] = []
+        for af, *rest, sa in gai_res:
+            if af == socket.AF_INET6 and not v6_found:
+                v6_found = True
+                target_addrs.insert(0, (af, sa[0]))
+            elif af == socket.AF_INET and not v4_found and v6_found:
+                v4_found = True
+                target_addrs.insert(1, (af, sa[0]))
+            else:
+                target_addrs.append((af, sa[0]))
+    else:
+        if isinstance(addr_obj, IPv6Address):
+            target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
+        else:
+            target_addrs = [(socket.AF_INET, addr_obj.compressed)]
+
+    oserrors: List[OSError] = []
+    async with create_task_group() as tg:
+        for i, (af, addr) in enumerate(target_addrs):
+            event = Event()
+            tg.start_soon(try_connect, addr, event)
+            with move_on_after(happy_eyeballs_delay):
+                await event.wait()
+
+    if connected_stream is None:
+        cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors)
+        raise OSError("All connection attempts failed") from cause
+
+    if tls or tls_hostname or ssl_context:
+        try:
+            return await TLSStream.wrap(
+                connected_stream,
+                server_side=False,
+                hostname=tls_hostname or str(remote_host),
+                ssl_context=ssl_context,
+                standard_compatible=tls_standard_compatible,
+            )
+        except BaseException:
+            await aclose_forcefully(connected_stream)
+            raise
+
+    return connected_stream
+
+
+async def connect_unix(path: Union[str, "PathLike[str]"]) -> UNIXSocketStream:
+    """
+    Connect to the given UNIX socket.
+
+    Not available on Windows.
+
+    :param path: path to the socket
+    :return: a socket stream object
+
+    """
+    path = str(Path(path))
+    return await get_asynclib().connect_unix(path)
+
+
+async def create_tcp_listener(
+    *,
+    local_host: Optional[IPAddressType] = None,
+    local_port: int = 0,
+    family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
+    backlog: int = 65536,
+    reuse_port: bool = False,
+) -> MultiListener[SocketStream]:
+    """
+    Create a TCP socket listener.
+
+    :param local_port: port number to listen on
+    :param local_host: IP address of the interface to listen on. If omitted, listen on all IPv4
+        and IPv6 interfaces. To listen on all interfaces on a specific address family, use
+        ``0.0.0.0`` for IPv4 or ``::`` for IPv6.
+    :param family: address family (used if ``local_host`` was omitted)
+    :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
+        65536)
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
+        (not supported on Windows)
+    :return: a list of listener objects
+
+    """
+    asynclib = get_asynclib()
+    backlog = min(backlog, 65536)
+    local_host = str(local_host) if local_host is not None else None
+    gai_res = await getaddrinfo(
+        local_host,  # type: ignore[arg-type]
+        local_port,
+        family=family,
+        type=socket.SOCK_STREAM,
+        flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+    )
+    listeners: List[SocketListener] = []
+    try:
+        # The set() is here to work around a glibc bug:
+        # https://sourceware.org/bugzilla/show_bug.cgi?id=14969
+        for fam, *_, sockaddr in sorted(set(gai_res)):
+            raw_socket = socket.socket(fam)
+            raw_socket.setblocking(False)
+
+            # For Windows, enable exclusive address use. For others, enable address reuse.
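+            # SO_EXCLUSIVEADDRUSE refuses to share the port with any other socket,
+            # closing the port-hijacking hole left open by Windows' permissive
+            # defaults. SO_REUSEADDR on POSIX merely allows rebinding a port that
+            # is still in the TIME_WAIT state after a previous listener has exited.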
+            if sys.platform == "win32":
+                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
+            else:
+                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+
+            if reuse_port:
+                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+
+            # If only IPv6 was requested, disable dual stack operation
+            if fam == socket.AF_INET6:
+                raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
+
+            raw_socket.bind(sockaddr)
+            raw_socket.listen(backlog)
+            listener = asynclib.TCPSocketListener(raw_socket)
+            listeners.append(listener)
+    except BaseException:
+        for listener in listeners:
+            await listener.aclose()
+
+        raise
+
+    return MultiListener(listeners)
+
+
+async def create_unix_listener(
+    path: Union[str, "PathLike[str]"],
+    *,
+    mode: Optional[int] = None,
+    backlog: int = 65536,
+) -> SocketListener:
+    """
+    Create a UNIX socket listener.
+
+    Not available on Windows.
+
+    :param path: path of the socket
+    :param mode: permissions to set on the socket
+    :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
+        65536)
+    :return: a listener object
+
+    .. versionchanged:: 3.0
+        If a socket already exists on the file system in the given path, it will be removed first.
+
+    """
+    path_str = str(path)
+    path = Path(path)
+    if path.is_socket():
+        path.unlink()
+
+    backlog = min(backlog, 65536)
+    raw_socket = socket.socket(socket.AF_UNIX)
+    raw_socket.setblocking(False)
+    try:
+        await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True)
+        if mode is not None:
+            await to_thread.run_sync(chmod, path_str, mode, cancellable=True)
+
+        raw_socket.listen(backlog)
+        return get_asynclib().UNIXSocketListener(raw_socket)
+    except BaseException:
+        raw_socket.close()
+        raise
+
+
+async def create_udp_socket(
+    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+    *,
+    local_host: Optional[IPAddressType] = None,
+    local_port: int = 0,
+    reuse_port: bool = False,
+) -> UDPSocket:
+    """
+    Create a UDP socket.
+
+    If ``local_port`` has been given, the socket will be bound to this port on the local machine,
+    making this socket suitable for providing UDP based services.
+
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
+        ``local_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
+        (not supported on Windows)
+    :return: a UDP socket
+
+    """
+    if family is AddressFamily.AF_UNSPEC and not local_host:
+        raise ValueError('Either "family" or "local_host" must be given')
+
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+    elif family is AddressFamily.AF_INET6:
+        local_address = ("::", 0)
+    else:
+        local_address = ("0.0.0.0", 0)
+
+    return await get_asynclib().create_udp_socket(
+        family, local_address, None, reuse_port
+    )
+
+
+async def create_connected_udp_socket(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+    local_host: Optional[IPAddressType] = None,
+    local_port: int = 0,
+    reuse_port: bool = False,
+) -> ConnectedUDPSocket:
+    """
+    Create a connected UDP socket.
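+
+    For example, a minimal request/reply sketch (the server address below is a
+    placeholder)::
+
+        import anyio
+
+        async def main() -> None:
+            udp = await anyio.create_connected_udp_socket("127.0.0.1", 9999)
+            async with udp:
+                await udp.send(b"ping")
+                print(await udp.receive())
+
+        anyio.run(main)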
+
+    Connected UDP sockets can only communicate with the specified remote host/port, and any packets
+    sent from other sources are dropped.
+
+    :param remote_host: remote host to set as the default target
+    :param remote_port: port on the remote host to set as the default target
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
+        ``local_host`` or ``remote_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
+        (not supported on Windows)
+    :return: a connected UDP socket
+
+    """
+    local_address = None
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+
+    gai_res = await getaddrinfo(
+        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
+    )
+    family = cast(AnyIPAddressFamily, gai_res[0][0])
+    remote_address = gai_res[0][-1]
+
+    return await get_asynclib().create_udp_socket(
+        family, local_address, remote_address, reuse_port
+    )
+
+
+async def getaddrinfo(
+    host: Union[bytearray, bytes, str],
+    port: Union[str, int, None],
+    *,
+    family: Union[int, AddressFamily] = 0,
+    type: Union[int, SocketKind] = 0,
+    proto: int = 0,
+    flags: int = 0,
+) -> GetAddrInfoReturnType:
+    """
+    Look up a numeric IP address given a host name.
+
+    Internationalized domain names are translated according to the (non-transitional) IDNA 2008
+    standard.
+
+    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+        (host, port), unlike what :func:`socket.getaddrinfo` does.
+
+    :param host: host name
+    :param port: port number
+    :param family: socket family (``AF_INET``, ...)
+    :param type: socket type (``SOCK_STREAM``, ...)
+    :param proto: protocol number
+    :param flags: flags to pass to upstream ``getaddrinfo()``
+    :return: list of tuples containing (family, type, proto, canonname, sockaddr)
+
+    .. seealso:: :func:`socket.getaddrinfo`
+
+    """
+    # Handle unicode hostnames
+    if isinstance(host, str):
+        try:
+            encoded_host = host.encode("ascii")
+        except UnicodeEncodeError:
+            import idna
+
+            encoded_host = idna.encode(host, uts46=True)
+    else:
+        encoded_host = host
+
+    gai_res = await get_asynclib().getaddrinfo(
+        encoded_host, port, family=family, type=type, proto=proto, flags=flags
+    )
+    return [
+        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+        for family, type, proto, canonname, sockaddr in gai_res
+    ]
+
+
+def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[Tuple[str, str]]:
+    """
+    Look up the host name of an IP address.
+
+    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
+    :param flags: flags to pass to upstream ``getnameinfo()``
+    :return: a tuple of (host name, service name)
+
+    .. seealso:: :func:`socket.getnameinfo`
+
+    """
+    return get_asynclib().getnameinfo(sockaddr, flags)
+
+
+def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
+    """
+    Wait until the given socket has data to be read.
+
+    This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
+    (default on py3.8+).
+
+    .. warning:: Only use this on raw sockets that have not been wrapped by any higher level
+        constructs like socket streams!
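+
+    For example, a sketch that polls a raw, non-blocking socket (``sock`` is assumed
+    to be connected already and ``read_once`` is an illustrative name)::
+
+        import socket
+
+        import anyio
+
+        async def read_once(sock: socket.socket) -> bytes:
+            sock.setblocking(False)
+            await anyio.wait_socket_readable(sock)
+            return sock.recv(4096)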
+ + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_asynclib().wait_socket_readable(sock) + + +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor event loop + (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher level + constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_asynclib().wait_socket_writable(sock) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: Union[Tuple[str, int, int, int], Tuple[str, int]] +) -> Tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. + + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr) + if scope_id: + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return cast(Tuple[str, int], sockaddr) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_streams.py b/sbsheriff/Lib/site-packages/anyio/_core/_streams.py new file mode 100644 index 0000000..58954a6 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_streams.py @@ -0,0 +1,45 @@ +import math +from typing import Any, Optional, Tuple, Type, TypeVar, overload + +from ..streams.memory import ( + MemoryObjectReceiveStream, + MemoryObjectSendStream, + MemoryObjectStreamState, +) + +T_Item = TypeVar("T_Item") + + +@overload +def create_memory_object_stream( + max_buffer_size: float, item_type: Type[T_Item] +) -> Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + ... + + +@overload +def create_memory_object_stream( + max_buffer_size: float = 0, +) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: + ... + + +def create_memory_object_stream( + max_buffer_size: float = 0, item_type: Optional[Type[T_Item]] = None +) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: + """ + Create a memory object stream. 
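+
+    For example, a minimal producer/consumer sketch; closing the send stream ends the
+    receive side's iteration::
+
+        import anyio
+
+        async def main() -> None:
+            send_stream, receive_stream = anyio.create_memory_object_stream(10)
+
+            async def producer() -> None:
+                async with send_stream:
+                    for i in range(3):
+                        await send_stream.send(i)
+
+            async with anyio.create_task_group() as tg:
+                tg.start_soon(producer)
+                async with receive_stream:
+                    async for item in receive_stream:
+                        print(item)
+
+        anyio.run(main)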
+ + :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking + :param item_type: type of item, for marking the streams with the right generic type for + static typing (not used at run time) + :return: a tuple of (send stream, receive stream) + + """ + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + + state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size) + return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_subprocesses.py b/sbsheriff/Lib/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 0000000..43fa6b6 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,136 @@ +from io import BytesIO +from os import PathLike +from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess +from typing import ( + IO, + Any, + AsyncIterable, + List, + Mapping, + Optional, + Sequence, + Union, + cast, +) + +from ..abc import Process +from ._eventloop import get_asynclib +from ._tasks import create_task_group + + +async def run_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + input: Optional[bytes] = None, + stdout: Union[int, IO[Any], None] = PIPE, + stderr: Union[int, IO[Any], None] = PIPE, + check: bool = True, + cwd: Union[str, bytes, "PathLike[str]", None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> "CompletedProcess[bytes]": + """ + Run an external command in a subprocess and wait until it completes. + + .. seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings containing the + executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or + :data:`subprocess.STDOUT` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process + terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the command + :param env: if not ``None``, this mapping replaces the inherited environment variables from the + parent process + :param start_new_session: if ``true`` the setsid() system call will be made in the child + process prior to the execution of the subprocess. 
(POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a + nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + async with await open_process( + command, + stdin=PIPE if input else DEVNULL, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) as process: + stream_contents: List[Optional[bytes]] = [None, None] + try: + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + except BaseException: + process.kill() + raise + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + stdin: Union[int, IO[Any], None] = PIPE, + stdout: Union[int, IO[Any], None] = PIPE, + stderr: Union[int, IO[Any], None] = PIPE, + cwd: Union[str, bytes, "PathLike[str]", None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> Process: + """ + Start an external command in a subprocess. + + .. seealso:: :class:`subprocess.Popen` + + :param command: either a string to pass to the shell, or an iterable of strings containing the + executable name or path and its arguments + :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a + file-like object, or ``None`` + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or ``None`` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or ``None`` + :param cwd: If not ``None``, the working directory is changed before executing + :param env: If env is not ``None``, it must be a mapping that defines the environment + variables for the new process + :param start_new_session: if ``true`` the setsid() system call will be made in the child + process prior to the execution of the subprocess. 
(POSIX only) + :return: an asynchronous process object + + """ + shell = isinstance(command, str) + return await get_asynclib().open_process( + command, + shell=shell, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_synchronization.py b/sbsheriff/Lib/site-packages/anyio/_core/_synchronization.py new file mode 100644 index 0000000..15d4afc --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_synchronization.py @@ -0,0 +1,595 @@ +from collections import deque +from dataclasses import dataclass +from types import TracebackType +from typing import Deque, Optional, Tuple, Type +from warnings import warn + +from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled +from ._compat import DeprecatedAwaitable +from ._eventloop import get_asynclib +from ._exceptions import BusyResourceError, WouldBlock +from ._tasks import CancelScope +from ._testing import TaskInfo, get_current_task + + +@dataclass(frozen=True) +class EventStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` + """ + + tasks_waiting: int + + +@dataclass(frozen=True) +class CapacityLimiterStatistics: + """ + :ivar int borrowed_tokens: number of tokens currently borrowed by tasks + :ivar float total_tokens: total number of available tokens + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this + limiter + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: Tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not + held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: Optional[TaskInfo] + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> "Event": + return get_asynclib().Event() + + def set(self) -> DeprecatedAwaitable: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns immediately. 
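+
+        For example, a minimal sketch in which one task signals another::
+
+            import anyio
+
+            async def main() -> None:
+                event = anyio.Event()
+
+                async def setter() -> None:
+                    await anyio.sleep(0.1)
+                    event.set()
+
+                async with anyio.create_task_group() as tg:
+                    tg.start_soon(setter)
+                    await event.wait()
+                    print("event was set")
+
+            anyio.run(main)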
+ + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class Lock: + _owner_task: Optional[TaskInfo] = None + + def __init__(self) -> None: + self._waiters: Deque[Tuple[TaskInfo, Event]] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + task = get_current_task() + event = Event() + token = task, event + self._waiters.append(token) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(token) + elif self._owner_task == task: + self.release() + + raise + + assert self._owner_task == task + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~WouldBlock: if the operation would block + + """ + task = get_current_task() + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + if self._owner_task is not None: + raise WouldBlock + + self._owner_task = task + + def release(self) -> DeprecatedAwaitable: + """Release the lock.""" + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding this lock") + + if self._waiters: + self._owner_task, event = self._waiters.popleft() + event.set() + else: + del self._owner_task + + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._owner_task is not None + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + return LockStatistics(self.locked(), self._owner_task, len(self._waiters)) + + +class Condition: + _owner_task: Optional[TaskInfo] = None + + def __init__(self, lock: Optional[Lock] = None): + self._lock = lock or Lock() + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
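+
+        For reference, a typical wait/notify round trip uses the ``async with`` form
+        rather than this method (a minimal sketch)::
+
+            import anyio
+
+            async def main() -> None:
+                condition = anyio.Condition()
+
+                async def waiter() -> None:
+                    async with condition:
+                        await condition.wait()
+                        print("notified")
+
+                async with anyio.create_task_group() as tg:
+                    tg.start_soon(waiter)
+                    await anyio.wait_all_tasks_blocked()
+                    async with condition:
+                        condition.notify()
+
+            anyio.run(main)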
+ + :raises ~WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> DeprecatedAwaitable: + """Release the underlying lock.""" + self._lock.release() + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + def statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __init__(self, initial_value: int, *, max_value: Optional[int] = None): + if not isinstance(initial_value, int): + raise TypeError("initial_value must be an integer") + if initial_value < 0: + raise ValueError("initial_value must be >= 0") + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError("max_value must be an integer or None") + if max_value < initial_value: + raise ValueError( + "max_value must be equal to or higher than initial_value" + ) + + self._value = initial_value + self._max_value = max_value + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> "Semaphore": + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + event = Event() + self._waiters.append(event) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + else: + self.release() + + raise + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
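+
+        For reference, the ``async with`` form is the usual way to hold the semaphore
+        (a minimal sketch capping concurrency at two tasks)::
+
+            import anyio
+
+            async def main() -> None:
+                semaphore = anyio.Semaphore(2)
+
+                async def worker(n: int) -> None:
+                    async with semaphore:
+                        print("worker", n, "holds a slot")
+                        await anyio.sleep(0.1)
+
+                async with anyio.create_task_group() as tg:
+                    for n in range(5):
+                        tg.start_soon(worker, n)
+
+            anyio.run(main)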
+ + :raises ~WouldBlock: if the operation would block + + """ + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> DeprecatedAwaitable: + """Increment the semaphore value.""" + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + if self._waiters: + self._waiters.popleft().set() + else: + self._value += 1 + + return DeprecatedAwaitable(self.release) + + @property + def value(self) -> int: + """The current value of the semaphore.""" + return self._value + + @property + def max_value(self) -> Optional[int]: + """The maximum value of the semaphore.""" + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> "CapacityLimiter": + return get_asynclib().CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their tokens. + + .. versionchanged:: 3.0 + The property is now writable. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + async def set_total_tokens(self, value: float) -> None: + warn( + "CapacityLimiter.set_total_tokens has been deprecated. Set the value of the" + '"total_tokens" attribute directly.', + DeprecationWarning, + ) + self.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> DeprecatedAwaitable: + """ + Acquire a token for the current task without waiting for one to become available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + :raises RuntimeError: if the current task has not borrowed a token from this limiter. 
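+
+        For reference, the ``async with`` form pairs :meth:`acquire` with this method
+        automatically (a minimal sketch)::
+
+            import anyio
+
+            async def main() -> None:
+                limiter = anyio.CapacityLimiter(4)
+
+                async def worker() -> None:
+                    async with limiter:  # at most 4 workers proceed at once
+                        await anyio.sleep(0.1)
+
+                async with anyio.create_task_group() as tg:
+                    for _ in range(10):
+                        tg.start_soon(worker)
+
+            anyio.run(main)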
+ + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +def create_lock() -> Lock: + """ + Create an asynchronous lock. + + :return: a lock object + + .. deprecated:: 3.0 + Use :class:`~Lock` directly. + + """ + warn("create_lock() is deprecated -- use Lock() directly", DeprecationWarning) + return Lock() + + +def create_condition(lock: Optional[Lock] = None) -> Condition: + """ + Create an asynchronous condition. + + :param lock: the lock to base the condition object on + :return: a condition object + + .. deprecated:: 3.0 + Use :class:`~Condition` directly. + + """ + warn( + "create_condition() is deprecated -- use Condition() directly", + DeprecationWarning, + ) + return Condition(lock=lock) + + +def create_event() -> Event: + """ + Create an asynchronous event object. + + :return: an event object + + .. deprecated:: 3.0 + Use :class:`~Event` directly. + + """ + warn("create_event() is deprecated -- use Event() directly", DeprecationWarning) + return get_asynclib().Event() + + +def create_semaphore(value: int, *, max_value: Optional[int] = None) -> Semaphore: + """ + Create an asynchronous semaphore. + + :param value: the semaphore's initial value + :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the + semaphore's value would exceed this number + :return: a semaphore object + + .. deprecated:: 3.0 + Use :class:`~Semaphore` directly. + + """ + warn( + "create_semaphore() is deprecated -- use Semaphore() directly", + DeprecationWarning, + ) + return Semaphore(value, max_value=max_value) + + +def create_capacity_limiter(total_tokens: float) -> CapacityLimiter: + """ + Create a capacity limiter. + + :param total_tokens: the total number of tokens available for borrowing (can be an integer or + :data:`math.inf`) + :return: a capacity limiter object + + .. deprecated:: 3.0 + Use :class:`~CapacityLimiter` directly. 
+ + """ + warn( + "create_capacity_limiter() is deprecated -- use CapacityLimiter() directly", + DeprecationWarning, + ) + return get_asynclib().CapacityLimiter(total_tokens) + + +class ResourceGuard: + __slots__ = "action", "_guarded" + + def __init__(self, action: str): + self.action = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + self._guarded = False + return None diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_tasks.py b/sbsheriff/Lib/site-packages/anyio/_core/_tasks.py new file mode 100644 index 0000000..f24764c --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,178 @@ +import math +from types import TracebackType +from typing import Optional, Type +from warnings import warn + +from ..abc._tasks import TaskGroup, TaskStatus +from ._compat import ( + DeprecatedAsyncContextManager, + DeprecatedAwaitable, + DeprecatedAwaitableFloat, +) +from ._eventloop import get_asynclib + + +class _IgnoredTaskStatus(TaskStatus): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope(DeprecatedAsyncContextManager["CancelScope"]): + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> "CancelScope": + return get_asynclib().CancelScope(shield=shield, deadline=deadline) + + def cancel(self) -> DeprecatedAwaitable: + """Cancel this scope immediately.""" + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> "CancelScope": + raise NotImplementedError + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + raise NotImplementedError + + +def open_cancel_scope(*, shield: bool = False) -> CancelScope: + """ + Open a cancel scope. + + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + .. deprecated:: 3.0 + Use :class:`~CancelScope` directly. 
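+
+    For example, the direct replacement (a minimal sketch)::
+
+        import anyio
+        from anyio import CancelScope
+
+        async def main() -> None:
+            with CancelScope() as scope:
+                scope.cancel()
+                await anyio.sleep(1)  # never completes; the scope is already cancelled
+            print("cancel_called =", scope.cancel_called)
+
+        anyio.run(main)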
+
+    """
+    warn(
+        "open_cancel_scope() is deprecated -- use CancelScope() directly",
+        DeprecationWarning,
+    )
+    return get_asynclib().CancelScope(shield=shield)
+
+
+class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]):
+    def __init__(self, cancel_scope: CancelScope):
+        self._cancel_scope = cancel_scope
+
+    def __enter__(self) -> CancelScope:
+        return self._cancel_scope.__enter__()
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb)
+        if self._cancel_scope.cancel_called:
+            raise TimeoutError
+
+        return retval
+
+
+def fail_after(delay: Optional[float], shield: bool = False) -> FailAfterContextManager:
+    """
+    Create a context manager which raises a :class:`TimeoutError` if it does not finish in time.
+
+    :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to
+        disable the timeout
+    :param shield: ``True`` to shield the cancel scope from external cancellation
+    :return: a context manager that yields a cancel scope
+    :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.abc.CancelScope`\\]
+
+    """
+    deadline = (
+        (get_asynclib().current_time() + delay) if delay is not None else math.inf
+    )
+    cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield)
+    return FailAfterContextManager(cancel_scope)
+
+
+def move_on_after(delay: Optional[float], shield: bool = False) -> CancelScope:
+    """
+    Create a cancel scope with a deadline that expires after the given delay.
+
+    :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None``
+        to disable the timeout
+    :param shield: ``True`` to shield the cancel scope from external cancellation
+    :return: a cancel scope
+
+    """
+    deadline = (
+        (get_asynclib().current_time() + delay) if delay is not None else math.inf
+    )
+    return get_asynclib().CancelScope(deadline=deadline, shield=shield)
+
+
+def current_effective_deadline() -> DeprecatedAwaitableFloat:
+    """
+    Return the nearest deadline among all the cancel scopes effective for the current task.
+
+    :return: a clock value from the event loop's internal clock (``float('inf')`` if there is no
+        deadline in effect)
+    :rtype: float
+
+    """
+    return DeprecatedAwaitableFloat(
+        get_asynclib().current_effective_deadline(), current_effective_deadline
+    )
+
+
+def create_task_group() -> "TaskGroup":
+    """
+    Create a task group.
+
+    :return: a task group
+
+    """
+    return get_asynclib().TaskGroup()
diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_testing.py b/sbsheriff/Lib/site-packages/anyio/_core/_testing.py
new file mode 100644
index 0000000..4998753
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/anyio/_core/_testing.py
@@ -0,0 +1,80 @@
+from typing import Any, Awaitable, Generator, Optional, Union
+
+from ._compat import DeprecatedAwaitableList, _warn_deprecation
+from ._eventloop import get_asynclib
+
+
+class TaskInfo:
+    """
+    Represents an asynchronous task.
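+
+    For example, an instance describing the running task can be obtained with
+    :func:`get_current_task` (a minimal sketch)::
+
+        import anyio
+
+        async def main() -> None:
+            task = anyio.get_current_task()
+            print(task.id, task.name)
+
+        anyio.run(main)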
+ + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: Optional[int], + name: Optional[str], + coro: Union[Generator, Awaitable[Any]], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: Optional[int] = parent_id + self.name: Optional[str] = name + self.coro: Union[Generator, Awaitable[Any]] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def __await__(self) -> Generator[None, None, "TaskInfo"]: + _warn_deprecation(self) + if False: + yield + + return self + + def _unwrap(self) -> "TaskInfo": + return self + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_asynclib().get_current_task() + + +def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + tasks = get_asynclib().get_running_tasks() + return DeprecatedAwaitableList(tasks, func=get_running_tasks) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_asynclib().wait_all_tasks_blocked() diff --git a/sbsheriff/Lib/site-packages/anyio/_core/_typedattr.py b/sbsheriff/Lib/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000..424836a --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +import sys +from typing import Any, Callable, Dict, Mapping, TypeVar, Union, overload + +from ._exceptions import TypedAttributeLookupError + +if sys.version_info >= (3, 8): + from typing import final +else: + from typing_extensions import final + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: Dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding values. + + If the provider wraps another provider, the attributes from that wrapper should also be + included in the returned mapping (but the wrapper may override the callables from the + wrapped instance). 
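+
+        For example, a provider exposing a single typed attribute (a minimal sketch;
+        ``Widget`` and its ``color`` attribute are illustrative names)::
+
+            from anyio import TypedAttributeProvider, TypedAttributeSet, typed_attribute
+
+            class WidgetAttribute(TypedAttributeSet):
+                color: str = typed_attribute()
+
+            class Widget(TypedAttributeProvider):
+                @property
+                def extra_attributes(self):
+                    return {WidgetAttribute.color: lambda: "blue"}
+
+            print(Widget().extra(WidgetAttribute.color))  # prints "blue"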
+ + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: + ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> Union[T_Attr, T_Default]: + ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for + :param default: the value that should be returned if no value is found for the attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was + given + + """ + try: + return self.extra_attributes[attribute]() + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__init__.py b/sbsheriff/Lib/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000..72c4444 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/abc/__init__.py @@ -0,0 +1,88 @@ +__all__ = ( + "AsyncResource", + "IPAddressType", + "IPSockAddrType", + "SocketAttribute", + "SocketStream", + "SocketListener", + "UDPSocket", + "UNIXSocketStream", + "UDPPacketType", + "ConnectedUDPSocket", + "UnreliableObjectReceiveStream", + "UnreliableObjectSendStream", + "UnreliableObjectStream", + "ObjectReceiveStream", + "ObjectSendStream", + "ObjectStream", + "ByteReceiveStream", + "ByteSendStream", + "ByteStream", + "AnyUnreliableByteReceiveStream", + "AnyUnreliableByteSendStream", + "AnyUnreliableByteStream", + "AnyByteReceiveStream", + "AnyByteSendStream", + "AnyByteStream", + "Listener", + "Process", + "Event", + "Condition", + "Lock", + "Semaphore", + "CapacityLimiter", + "CancelScope", + "TaskGroup", + "TaskStatus", + "TestRunner", + "BlockingPortal", +) + +from typing import Any + +from ._resources import AsyncResource +from ._sockets import ( + ConnectedUDPSocket, + IPAddressType, + IPSockAddrType, + SocketAttribute, + SocketListener, + SocketStream, + UDPPacketType, + UDPSocket, + UNIXSocketStream, +) +from ._streams import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + AnyUnreliableByteReceiveStream, + AnyUnreliableByteSendStream, + AnyUnreliableByteStream, + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + UnreliableObjectReceiveStream, + UnreliableObjectSendStream, + UnreliableObjectStream, +) +from ._subprocesses import Process +from ._tasks import TaskGroup, TaskStatus +from ._testing import TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore +from .._core._tasks import CancelScope +from ..from_thread import BlockingPortal + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio.abc."): + value.__module__ = __name__ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..3be501e Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc 
b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc new file mode 100644 index 0000000..8566d34 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-310.pyc new file mode 100644 index 0000000..9ef925f Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_sockets.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-310.pyc new file mode 100644 index 0000000..5a085eb Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_streams.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-310.pyc new file mode 100644 index 0000000..caeb0af Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-310.pyc new file mode 100644 index 0000000..0ad75a6 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_tasks.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-310.pyc new file mode 100644 index 0000000..3294ad0 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/abc/__pycache__/_testing.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/_resources.py b/sbsheriff/Lib/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000..4f66c38 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/abc/_resources.py @@ -0,0 +1,29 @@ +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Optional, Type, TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, and calls + :meth:`aclose` on exit. 
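+
+    For example, a subclass only needs to implement :meth:`aclose` (a minimal sketch;
+    ``Connection`` is an illustrative name)::
+
+        import anyio
+        from anyio.abc import AsyncResource
+
+        class Connection(AsyncResource):
+            async def aclose(self) -> None:
+                print("closed")
+
+        async def main() -> None:
+            async with Connection():
+                print("in use")
+
+        anyio.run(main)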
+ """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/sbsheriff/Lib/site-packages/anyio/abc/_sockets.py b/sbsheriff/Lib/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000..f73e795 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,183 @@ +import socket +from abc import abstractmethod +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import ( + Any, + AsyncContextManager, + Callable, + Collection, + Dict, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, +) + +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, T_Stream, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = Tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = Tuple[bytes, IPSockAddrType] +T_Retval = TypeVar("T_Retval") + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: Dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: Optional[Tuple[str, int]] = convert( + self._raw_socket.getpeername() + ) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[ + SocketAttribute.local_port + ] = lambda: self._raw_socket.getsockname()[1] + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. 
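+
+    For example, an echo handler suitable for :meth:`SocketListener.serve` (a minimal
+    sketch; iteration yields chunks until the peer closes the connection)::
+
+        async def echo(stream: "SocketStream") -> None:
+            async with stream:
+                async for chunk in stream:
+                    await stream.send(chunk)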
+ + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + +class UNIXSocketStream(SocketStream): + @abstractmethod + async def send_fds( + self, message: bytes, fds: Collection[Union[int, IOBase]] + ) -> None: + """ + Send file descriptors along with a message to the peer. + + :param message: a non-empty bytestring + :param fds: a collection of files (either numeric file descriptors or open file or socket + objects) + """ + + @abstractmethod + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + """ + Receive file descriptors along with a message from the peer. + + :param msglen: length of the message to expect from the peer + :param maxfds: maximum number of file descriptors to expect from the peer + :return: a tuple of (message, file descriptors) + """ + + +class SocketListener(Listener[SocketStream], _SocketProvider): + """ + Listens to incoming socket connections. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @abstractmethod + async def accept(self) -> SocketStream: + """Accept an incoming connection.""" + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None + ) -> None: + from .. import create_task_group + + context_manager: AsyncContextManager + if task_group is None: + task_group = context_manager = create_task_group() + else: + # Can be replaced with AsyncExitStack once on py3.7+ + context_manager = _NullAsyncContextManager() + + async with context_manager: + while True: + stream = await self.accept() + task_group.start_soon(handler, stream) + + +class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): + """ + Represents an unconnected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, host: str, port: int) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).""" + return await self.send((data, (host, port))) + + +class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents an connected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/_streams.py b/sbsheriff/Lib/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000..4980ef4 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/abc/_streams.py @@ -0,0 +1,198 @@ +from abc import abstractmethod +from typing import Any, Callable, Generic, Optional, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +class UnreliableObjectReceiveStream( + Generic[T_Item], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in which they + were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the given type + parameter. + """ + + def __aiter__(self) -> "UnreliableObjectReceiveStream[T_Item]": + return self + + async def __anext__(self) -> T_Item: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_Item: + """ + Receive the next item. 
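# --- Editor's sketch (illustration only, not part of the committed diff) ---
# Assumed usage of SocketListener.serve() as defined above: a handler is run
# for every accepted connection. The handler below is hypothetical.
import anyio
from anyio.abc import SocketStream

async def handler(client: SocketStream) -> None:
    async with client:
        await client.send(b"hello\n")

async def main() -> None:
    listener = await anyio.create_tcp_listener(local_port=8000)
    await listener.serve(handler)  # runs until cancelled

anyio.run(main)
# ---------------------------------------------------------------------------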
+ + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_Item], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the recipient(s) in the + same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_Item) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of message + delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_Item]): + """ + A receive message stream which guarantees that messages are received in the same order in + which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_Item]): + """ + A send message stream which guarantees that messages are delivered in the same order in which + they were sent, without missing any messages in the middle. + """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. + This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more than + 65536 bytes. + """ + + def __aiter__(self) -> "ByteReceiveStream": + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementors of this interface should not return an empty :class:`bytes` object, + and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. 
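# --- Editor's sketch (illustration only, not part of the committed diff) ---
# The __aiter__/__anext__ protocol above means any byte receive stream can be
# drained with a plain `async for`; iteration stops cleanly when the peer
# closes the stream (EndOfStream is turned into StopAsyncIteration).
from anyio.abc import ByteReceiveStream

async def read_all(stream: ByteReceiveStream) -> bytes:
    chunks = []
    async for chunk in stream:  # each chunk is at most 65536 bytes
        chunks.append(chunk)
    return b"".join(chunks)
# ---------------------------------------------------------------------------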
+ This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_Stream], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. + + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling each + accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/sbsheriff/Lib/site-packages/anyio/abc/_subprocesses.py b/sbsheriff/Lib/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000..1e633fb --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,78 @@ +from abc import abstractmethod +from signal import Signals +from typing import Optional + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> Optional[int]: + """ + The return code of the process. If the process has not yet terminated, this will be + ``None``. 
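# --- Editor's sketch (illustration only, not part of the committed diff) ---
# Assumed usage: this Process interface is what anyio.open_process() returns;
# stdout/stderr are ByteReceiveStreams and stdin is a ByteSendStream.
import anyio

async def main() -> None:
    async with await anyio.open_process(["echo", "hi"]) as process:
        assert process.stdout is not None  # piped by default
        output = await process.stdout.receive()
        await process.wait()
        print(output, process.returncode)

anyio.run(main)
# ---------------------------------------------------------------------------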
+ """ + + @property + @abstractmethod + def stdin(self) -> Optional[ByteSendStream]: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> Optional[ByteReceiveStream]: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> Optional[ByteReceiveStream]: + """The stream for the standard error output of the process.""" diff --git a/sbsheriff/Lib/site-packages/anyio/abc/_tasks.py b/sbsheriff/Lib/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000..99928a1 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,104 @@ +import typing +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Any, Callable, Coroutine, Optional, Type, TypeVar +from warnings import warn + +if typing.TYPE_CHECKING: + from anyio._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") + + +class TaskStatus(metaclass=ABCMeta): + @abstractmethod + def started(self, value: object = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + """ + + cancel_scope: "CancelScope" + + async def spawn( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. deprecated:: 3.0 + Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn( + 'spawn() is deprecated -- use start_soon() (without the "await") instead', + DeprecationWarning, + ) + self.start_soon(func, *args, name=name) + + @abstractmethod + def start_soon( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> object: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling ``task_status.started()`` + + .. 
versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> "TaskGroup": + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/sbsheriff/Lib/site-packages/anyio/abc/_testing.py b/sbsheriff/Lib/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000..4e3621d --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/abc/_testing.py @@ -0,0 +1,68 @@ +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Iterable +from typing import Any, Callable, Coroutine, Dict, Optional, Type, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the same event + loop. + """ + + def __enter__(self) -> "TestRunner": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[types.TracebackType], + ) -> Optional[bool]: + self.close() + return None + + @abstractmethod + def close(self) -> None: + """Close the event loop.""" + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., "AsyncGenerator[_T, Any]"], + kwargs: Dict[str, Any], + ) -> "Iterable[_T]": + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: Dict[str, Any], + ) -> _T: + """ + Run an async fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any] + ) -> None: + """ + Run an async test function. + + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/sbsheriff/Lib/site-packages/anyio/from_thread.py b/sbsheriff/Lib/site-packages/anyio/from_thread.py new file mode 100644 index 0000000..e4f871f --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/from_thread.py @@ -0,0 +1,502 @@ +import threading +from asyncio import iscoroutine +from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait +from contextlib import AbstractContextManager, contextmanager +from types import TracebackType +from typing import ( + Any, + AsyncContextManager, + Callable, + ContextManager, + Coroutine, + Dict, + Generator, + Iterable, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) +from warnings import warn + +from ._core import _eventloop +from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc._tasks import TaskStatus + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co") + + +def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + """ + Call a coroutine function from a worker thread. 
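# --- Editor's sketch (illustration only, not part of the committed diff) ---
# run() above is the bridge back into the event loop from a worker thread
# started with anyio.to_thread.run_sync().
import anyio
from anyio import from_thread, to_thread

async def async_step() -> str:
    await anyio.sleep(0.1)
    return "ran in the event loop"

def blocking_step() -> str:
    # We are inside a worker thread here; hop back to the loop and wait:
    return from_thread.run(async_step)

async def main() -> None:
    print(await to_thread.run_sync(blocking_step))

anyio.run(main)
# ---------------------------------------------------------------------------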
+ + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError("This function can only be run from an AnyIO worker thread") + + return asynclib.run_async_from_thread(func, *args) + + +def run_async_from_thread( + func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object +) -> T_Retval: + warn( + "run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead", + DeprecationWarning, + ) + return run(func, *args) + + +def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. + + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError("This function can only be run from an AnyIO worker thread") + + return asynclib.run_sync_from_thread(func, *args) + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval: + warn( + "run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead", + DeprecationWarning, + ) + return run_sync(func, *args) + + +class _BlockingAsyncContextManager(AbstractContextManager): + _enter_future: Future + _exit_future: Future + _exit_event: Event + _exit_exc_info: Tuple[ + Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType] + ] = (None, None, None) + + def __init__(self, async_cm: AsyncContextManager[T_co], portal: "BlockingPortal"): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> Optional[bool]: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. + await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. 
+ result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + cm = self._enter_future.result() + return cast(T_co, cm) + + def __exit__( + self, + __exc_type: Optional[Type[BaseException]], + __exc_value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> Optional[bool]: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> "BlockingPortal": + return get_asynclib().BlockingPortal() + + def __init__(self) -> None: + self._event_loop_thread_id: Optional[int] = threading.get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> "BlockingPortal": + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == threading.get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop thread" + ) + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them + finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func( + self, func: Callable, args: tuple, kwargs: Dict[str, Any], future: Future + ) -> None: + def callback(f: Future) -> None: + if f.cancelled() and self._event_loop_thread_id not in ( + None, + threading.get_ident(), + ): + self.call(scope.cancel) + + try: + retval = func(*args, **kwargs) + if iscoroutine(retval): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval + except self._cancelled_exc_class: + future.cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable, + args: tuple, + kwargs: Dict[str, Any], + name: object, + future: Future, + ) -> None: + """ + Spawn a new task using the given callable. + + Implementors must ensure that the future is resolved when the task finishes. 
+ + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, or the + exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call( + self, func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object + ) -> T_Retval: + ... + + @overload + def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval: + ... + + def call( + self, + func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def spawn_task( + self, + func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + ... + + @overload + def spawn_task( + self, func: Callable[..., T_Retval], *args: object, name: object = None + ) -> "Future[T_Retval]": + ... + + def spawn_task( + self, + func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. versionadded:: 2.1 + .. deprecated:: 3.0 + Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn( + "spawn_task() is deprecated -- use start_task_soon() instead", + DeprecationWarning, + ) + return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type] + + @overload + def start_task_soon( + self, + func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + ... + + @overload + def start_task_soon( + self, func: Callable[..., T_Retval], *args: object, name: object = None + ) -> "Future[T_Retval]": + ... + + def start_task_soon( + self, + func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling the + returned future. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. 
versionadded:: 3.0 + + """ + self._check_running() + f: Future = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> Tuple["Future[Any]", Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`TaskGroup.start`. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` is the + value passed to ``task_status.started()`` from within the target function + + .. versionadded:: 3.0 + + """ + + def task_done(future: Future) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AsyncContextManager[T_co] + ) -> ContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the + middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +def create_blocking_portal() -> BlockingPortal: + """ + Create a portal for running functions in the event loop thread from external threads. + + Use this function in asynchronous code when you need to allow external threads access to the + event loop where your asynchronous code is currently running. + + .. deprecated:: 3.0 + Use :class:`.BlockingPortal` directly. + + """ + warn( + "create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() " + "directly", + DeprecationWarning, + ) + return BlockingPortal() + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", backend_options: Optional[Dict[str, Any]] = None +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. 
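# --- Editor's sketch (illustration only, not part of the committed diff) ---
# start_blocking_portal() used as a context manager, per the requirement noted
# above: plain synchronous code schedules work on the portal's event loop.
import anyio
from anyio.from_thread import start_blocking_portal

async def greet(name: str) -> str:
    await anyio.sleep(0.1)
    return f"hello {name}"

with start_blocking_portal() as portal:
    print(portal.call(greet, "world"))         # blocks until the coroutine returns
    future = portal.start_task_soon(greet, "again")
    print(future.result())                     # a concurrent.futures.Future
# ---------------------------------------------------------------------------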
+ + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if future.set_running_or_notify_cancel(): + future.set_result(portal_) + await portal_.sleep_until_stopped() + + future: Future[BlockingPortal] = Future() + with ThreadPoolExecutor(1) as executor: + run_future = executor.submit( + _eventloop.run, + run_portal, # type: ignore[arg-type] + backend=backend, + backend_options=backend_options, + ) + try: + wait( + cast(Iterable[Future], [run_future, future]), + return_when=FIRST_COMPLETED, + ) + except BaseException: + future.cancel() + run_future.cancel() + raise + + if future.done(): + portal = future.result() + try: + yield portal + except BaseException: + portal.call(portal.stop, True) + raise + + portal.call(portal.stop, False) + + run_future.result() diff --git a/sbsheriff/Lib/site-packages/anyio/lowlevel.py b/sbsheriff/Lib/site-packages/anyio/lowlevel.py new file mode 100644 index 0000000..c1da8fa --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/lowlevel.py @@ -0,0 +1,170 @@ +import enum +import sys +from dataclasses import dataclass +from typing import Any, Dict, Generic, Set, TypeVar, Union, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_asynclib + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + .. 
versionadded:: 3.0 + + """ + await get_asynclib().cancel_shielded_checkpoint() + + +def current_token() -> object: + """Return a backend specific token object that can be used to get back to the event loop.""" + return get_asynclib().current_token() + + +_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[Any, Dict[str, Any]] +_token_wrappers: Dict[Any, "_TokenWrapper"] = {} + + +@dataclass(frozen=True) +class _TokenWrapper: + __slots__ = "_token", "__weakref__" + _token: object + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = "_var", "_value", "_redeemed" + + def __init__( + self, var: "RunVar[T]", value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] + ): + self._var = var + self._value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = value + self._redeemed = False + + +class RunVar(Generic[T]): + """Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop.""" + + __slots__ = "_name", "_default" + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + _token_wrappers: Set[_TokenWrapper] = set() + + def __init__( + self, + name: str, + default: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET, + ): + self._name = name + self._default = default + + @property + def _current_vars(self) -> Dict[str, T]: + token = current_token() + while True: + try: + return _run_vars[token] + except TypeError: + # Happens when token isn't weak referable (TrioToken). + # This workaround does mean that some memory will leak on Trio until the problem + # is fixed on their end. + token = _TokenWrapper(token) + self._token_wrappers.add(token) + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars + + @overload + def get(self, default: D) -> Union[T, D]: + ... + + @overload + def get(self) -> T: + ...
+ + def get( + self, default: Union[D, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET + ) -> Union[T, D]: + try: + return self._current_vars[self._name] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError( + f'Run variable "{self._name}" has no value and no default set' + ) + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) + current_vars[self._name] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError("This token does not belong to this RunVar") + + if token._redeemed: + raise ValueError("This token has already been used") + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self._name] + except KeyError: + pass + else: + self._current_vars[self._name] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f"<RunVar name={self._name!r}>" diff --git a/sbsheriff/Lib/site-packages/anyio/py.typed b/sbsheriff/Lib/site-packages/anyio/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/sbsheriff/Lib/site-packages/anyio/pytest_plugin.py b/sbsheriff/Lib/site-packages/anyio/pytest_plugin.py new file mode 100644 index 0000000..432eee3 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,144 @@ +from contextlib import contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction +from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Tuple, cast + +import pytest +import sniffio +from _pytest.fixtures import FixtureRequest + +from ._core._eventloop import get_all_backends, get_asynclib +from .abc import TestRunner + +if TYPE_CHECKING: + from _pytest.config import Config + +_current_runner: Optional[TestRunner] = None + + +def extract_backend_and_options(backend: object) -> Tuple[str, Dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(Tuple[str, Dict[str, Any]], backend) + + raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") + + +@contextmanager +def get_runner( + backend_name: str, backend_options: Dict[str, Any] +) -> Generator[TestRunner, object, None]: + global _current_runner + if _current_runner: + yield _current_runner + return + + asynclib = get_asynclib(backend_name) + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async library + token = sniffio.current_async_library_cvar.set(backend_name) + + try: + backend_options = backend_options or {} + with asynclib.TestRunner(**backend_options) as runner: + _current_runner = runner + yield runner + finally: + _current_runner = None + if token: + sniffio.current_async_library_cvar.reset(token) + + +def pytest_configure(config: "Config") -> None: + config.addinivalue_line( + "markers", + "anyio: mark the (coroutine function) test to be run " + "asynchronously via anyio.", + ) + + +def pytest_fixture_setup(fixturedef: Any, request: FixtureRequest) -> None: + def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def] + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + with
get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(func): + yield from runner.run_asyncgen_fixture(func, kwargs) + else: + yield runner.run_fixture(func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests that involve + # the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + has_backend_arg = "anyio_backend" in fixturedef.argnames + fixturedef.func = wrapper + if not has_backend_arg: + fixturedef.argnames += ("anyio_backend",) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if marker or any(marker.name == "anyio" for marker in own_markers): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> Optional[bool]: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + runner.run_test(pyfuncitem.obj, testargs) + + return True + + return None + + +@pytest.fixture(params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> Dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__init__.py b/sbsheriff/Lib/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..87b41d9 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-310.pyc new file mode 100644 index 0000000..d96fe29 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/buffered.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/file.cpython-310.pyc 
b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/file.cpython-310.pyc new file mode 100644 index 0000000..c915245 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/file.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-310.pyc new file mode 100644 index 0000000..6d10c30 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/memory.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-310.pyc new file mode 100644 index 0000000..c75dbf7 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/stapled.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/text.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/text.cpython-310.pyc new file mode 100644 index 0000000..b1bba72 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/text.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-310.pyc b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-310.pyc new file mode 100644 index 0000000..85931d6 Binary files /dev/null and b/sbsheriff/Lib/site-packages/anyio/streams/__pycache__/tls.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/anyio/streams/buffered.py b/sbsheriff/Lib/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000..1503b3e --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/streams/buffered.py @@ -0,0 +1,116 @@ +from dataclasses import dataclass, field +from typing import Any, Callable, Mapping + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving + capabilities in the form of a byte stream. + """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes we get from + # the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. 
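# --- Editor's sketch (illustration only, not part of the committed diff) ---
# The buffered wrapper defined here combines receive_until() and
# receive_exactly() for simple framed reads. The "<length>\r\n<payload>" wire
# format assumed below is hypothetical.
from anyio.abc import AnyByteReceiveStream
from anyio.streams.buffered import BufferedByteReceiveStream

async def read_frame(raw: AnyByteReceiveStream) -> bytes:
    buffered = BufferedByteReceiveStream(raw)
    header = await buffered.receive_until(b"\r\n", max_bytes=64)
    length = int(header)  # assumes the peer sends the payload length first
    return await buffered.receive_exactly(length)
# ---------------------------------------------------------------------------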
+ + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. + + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git a/sbsheriff/Lib/site-packages/anyio/streams/file.py b/sbsheriff/Lib/site-packages/anyio/streams/file.py new file mode 100644 index 0000000..938d1da --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/streams/file.py @@ -0,0 +1,145 @@ +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, Callable, Dict, Mapping, Union, cast + +from .. 
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: Dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, "PathLike[str]"]) -> "FileReadStream": + """ + Create a file read stream by opening the given file. + + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: Union[str, "PathLike[str]"], append: bool = False + ) -> "FileWriteStream": + """ + Create a file write stream by opening the given file for writing. 
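# --- Editor's sketch (illustration only, not part of the committed diff) ---
# Copying a file with the two stream classes in this module; from_path() opens
# the file in a worker thread. The file names are hypothetical.
import anyio
from anyio.streams.file import FileReadStream, FileWriteStream

async def copy(src: str, dst: str) -> None:
    async with await FileReadStream.from_path(src) as source:
        async with await FileWriteStream.from_path(dst) as sink:
            async for chunk in source:
                await sink.send(chunk)

anyio.run(copy, "in.bin", "out.bin")
# ---------------------------------------------------------------------------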
+ + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any existing file + at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/sbsheriff/Lib/site-packages/anyio/streams/memory.py b/sbsheriff/Lib/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000..d8a958c --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/streams/memory.py @@ -0,0 +1,275 @@ +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Deque, Generic, List, NamedTuple, Optional, Type, TypeVar + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, + get_cancelled_exc_class, +) +from .._core._compat import DeprecatedAwaitable +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: Deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: "OrderedDict[Event, List[T_Item]]" = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: "OrderedDict[Event, T_Item]" = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_Item], ObjectReceiveStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_Item: + """ + Receive the next item if it can be done without waiting. 
+ + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_Item: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + container: List[T_Item] = [] + self._state.waiting_receivers[receive_event] = container + + try: + await receive_event.wait() + except get_cancelled_exc_class(): + # Ignore the immediate cancellation if we already received an item, so as not to + # lose it + if not container: + raise + finally: + self._state.waiting_receivers.pop(receive_event, None) + + if container: + return container[0] + else: + raise EndOfStream + + def clone(self) -> "MemoryObjectReceiveStream[T_Item]": + """ + Create a clone of this receive stream. + + Each clone can be closed separately. Only when all clones have been closed will the + receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> "MemoryObjectReceiveStream[T_Item]": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_Item], ObjectSendStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_Item) -> DeprecatedAwaitable: + """ + Send an item immediately if it can be done without waiting. 
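# --- Editor's sketch (illustration only, not part of the committed diff) ---
# Memory object streams are created in linked pairs with
# anyio.create_memory_object_stream(); the `async for` below ends once every
# clone of the send stream has been closed.
import anyio

async def main() -> None:
    send, receive = anyio.create_memory_object_stream(max_buffer_size=4)

    async def producer() -> None:
        async with send:
            for i in range(3):
                await send.send(i)

    async with anyio.create_task_group() as tg:
        tg.start_soon(producer)
        async with receive:
            async for item in receive:
                print(item)

anyio.run(main)
# ---------------------------------------------------------------------------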
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + if self._state.waiting_receivers: + receive_event, container = self._state.waiting_receivers.popitem(last=False) + container.append(item) + receive_event.set() + elif len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + return DeprecatedAwaitable(self.send_nowait) + + async def send(self, item: T_Item) -> None: + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type] + raise + + if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type] + raise BrokenResourceError + + def clone(self) -> "MemoryObjectSendStream[T_Item]": + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will the + sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> "MemoryObjectSendStream[T_Item]": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() diff --git a/sbsheriff/Lib/site-packages/anyio/streams/stapled.py b/sbsheriff/Lib/site-packages/anyio/streams/stapled.py new file mode 100644 index 0000000..a71ffb0 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/streams/stapled.py @@ -0,0 +1,138 @@ +from dataclasses import dataclass +from typing import Any, Callable, Generic, List, Mapping, Optional, Sequence, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. 
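# --- Editor's sketch (illustration only, not part of the committed diff) ---
# Stapling two one-directional memory streams into a pair of bidirectional
# endpoints, following the dataclass field order used in this module
# (send_stream first, then receive_stream). Buffer size 1 avoids blocking on
# the unpaired send below.
import anyio
from anyio.streams.stapled import StapledObjectStream

async def main() -> None:
    send_a, receive_a = anyio.create_memory_object_stream(1)
    send_b, receive_b = anyio.create_memory_object_stream(1)
    side_a = StapledObjectStream(send_a, receive_b)  # A sends on a, hears b
    side_b = StapledObjectStream(send_b, receive_a)  # B sends on b, hears a
    await side_a.send("ping")
    print(await side_b.receive())

anyio.run(main)
# ---------------------------------------------------------------------------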
+ + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners moved into + this one. + + Extra attributes are provided from each listener, with each successive listener overriding any + conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: List[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None + ) -> None: + from .. 
import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/sbsheriff/Lib/site-packages/anyio/streams/text.py b/sbsheriff/Lib/site-packages/anyio/streams/text.py new file mode 100644 index 0000000..ccb683c --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/streams/text.py @@ -0,0 +1,141 @@ +import codecs +from dataclasses import InitVar, dataclass, field +from typing import Any, Callable, Mapping, Tuple + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, +) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely + received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults to + ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes (defaults to + ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., Tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on + send. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } diff --git a/sbsheriff/Lib/site-packages/anyio/streams/tls.py b/sbsheriff/Lib/site-packages/anyio/streams/tls.py new file mode 100644 index 0000000..c8e19e2 --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/streams/tls.py @@ -0,0 +1,317 @@ +import logging +import re +import ssl +from dataclasses import dataclass +from functools import wraps +from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, TypeVar, Union + +from .. import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +T_Retval = TypeVar("T_Retval") +_PCTRTT = Tuple[Tuple[str, str], ...] +_PCTRTTT = Tuple[_PCTRTT, ...] 
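+
+# _PCTRTT/_PCTRTTT mirror the tuple shapes produced by
+# ssl.SSLSocket.getpeercert(): a certificate subject is a tuple of RDNs
+# (_PCTRTTT), each RDN itself a tuple of (name, value) string pairs (_PCTRTT).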
+
+
+class TLSAttribute(TypedAttributeSet):
+    """Contains Transport Layer Security related attributes."""
+
+    #: the selected ALPN protocol
+    alpn_protocol: Optional[str] = typed_attribute()
+    #: the channel binding for type ``tls-unique``
+    channel_binding_tls_unique: bytes = typed_attribute()
+    #: the selected cipher
+    cipher: Tuple[str, str, int] = typed_attribute()
+    #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` for more
+    #: information)
+    peer_certificate: Optional[
+        Dict[str, Union[str, _PCTRTTT, _PCTRTT]]
+    ] = typed_attribute()
+    #: the peer certificate in binary form
+    peer_certificate_binary: Optional[bytes] = typed_attribute()
+    #: ``True`` if this is the server side of the connection
+    server_side: bool = typed_attribute()
+    #: ciphers shared between both ends of the TLS connection
+    shared_ciphers: List[Tuple[str, str, int]] = typed_attribute()
+    #: the :class:`~ssl.SSLObject` used for encryption
+    ssl_object: ssl.SSLObject = typed_attribute()
+    #: ``True`` if this stream does (and expects) a closing TLS handshake when the stream is being
+    #: closed
+    standard_compatible: bool = typed_attribute()
+    #: the TLS protocol version (e.g. ``TLSv1.2``)
+    tls_version: str = typed_attribute()
+
+
+@dataclass(eq=False)
+class TLSStream(ByteStream):
+    """
+    A stream wrapper that encrypts all sent data and decrypts received data.
+
+    This class has no public initializer; use :meth:`wrap` instead.
+    All extra attributes from :class:`~TLSAttribute` are supported.
+
+    :var AnyByteStream transport_stream: the wrapped stream
+
+    """
+
+    transport_stream: AnyByteStream
+    standard_compatible: bool
+    _ssl_object: ssl.SSLObject
+    _read_bio: ssl.MemoryBIO
+    _write_bio: ssl.MemoryBIO
+
+    @classmethod
+    async def wrap(
+        cls,
+        transport_stream: AnyByteStream,
+        *,
+        server_side: Optional[bool] = None,
+        hostname: Optional[str] = None,
+        ssl_context: Optional[ssl.SSLContext] = None,
+        standard_compatible: bool = True,
+    ) -> "TLSStream":
+        """
+        Wrap an existing stream with Transport Layer Security.
+
+        This performs a TLS handshake with the peer.
+
+        :param transport_stream: a bytes-transporting stream to wrap
+        :param server_side: ``True`` if this is the server side of the connection, ``False`` if
+            this is the client side (if omitted, will be set to ``False`` if ``hostname`` has been
+            provided, ``True`` otherwise). Used only to create a default context when an explicit
+            context has not been provided.
+        :param hostname: host name of the peer (if host name checking is desired)
+        :param ssl_context: the SSLContext object to use (if not provided, a secure default will be
+            created)
+        :param standard_compatible: if ``False``, skip the closing handshake when closing the
+            connection, and don't raise an exception if the peer does the same
+        :raises ~ssl.SSLError: if the TLS handshake fails
+
+        """
+        if server_side is None:
+            server_side = not hostname
+
+        if not ssl_context:
+            purpose = (
+                ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH
+            )
+            ssl_context = ssl.create_default_context(purpose)
+
+            # Re-enable detection of unexpected EOFs if it was disabled by Python
+            # (clearing the flag, rather than XORing it, so that it is never
+            # accidentally switched on when it was already off)
+            if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"):
+                ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF  # type: ignore[attr-defined]
+
+        bio_in = ssl.MemoryBIO()
+        bio_out = ssl.MemoryBIO()
+        ssl_object = ssl_context.wrap_bio(
+            bio_in, bio_out, server_side=server_side, server_hostname=hostname
+        )
+        wrapper = cls(
+            transport_stream=transport_stream,
+            standard_compatible=standard_compatible,
+            _ssl_object=ssl_object,
+            _read_bio=bio_in,
+            _write_bio=bio_out,
+        )
+        await wrapper._call_sslobject_method(ssl_object.do_handshake)
+        return wrapper
+
+    async def _call_sslobject_method(
+        self, func: Callable[..., T_Retval], *args: object
+    ) -> T_Retval:
+        while True:
+            try:
+                result = func(*args)
+            except ssl.SSLWantReadError:
+                try:
+                    # Flush any pending writes first
+                    if self._write_bio.pending:
+                        await self.transport_stream.send(self._write_bio.read())
+
+                    data = await self.transport_stream.receive()
+                except EndOfStream:
+                    self._read_bio.write_eof()
+                except OSError as exc:
+                    self._read_bio.write_eof()
+                    self._write_bio.write_eof()
+                    raise BrokenResourceError from exc
+                else:
+                    self._read_bio.write(data)
+            except ssl.SSLWantWriteError:
+                await self.transport_stream.send(self._write_bio.read())
+            except ssl.SSLSyscallError as exc:
+                self._read_bio.write_eof()
+                self._write_bio.write_eof()
+                raise BrokenResourceError from exc
+            except ssl.SSLError as exc:
+                self._read_bio.write_eof()
+                self._write_bio.write_eof()
+                if (
+                    isinstance(exc, ssl.SSLEOFError)
+                    or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror
+                ):
+                    if self.standard_compatible:
+                        raise BrokenResourceError from exc
+                    else:
+                        raise EndOfStream from None
+
+                raise
+            else:
+                # Flush any pending writes first
+                if self._write_bio.pending:
+                    await self.transport_stream.send(self._write_bio.read())
+
+                return result
+
+    async def unwrap(self) -> Tuple[AnyByteStream, bytes]:
+        """
+        Performs the TLS closing handshake.
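+
+        A minimal sketch, assuming an established ``tls_stream``::
+
+            transport, leftover = await tls_stream.unwrap()
+            # any plaintext already read past the closing handshake comes back
+            # in ``leftover``; consume it before reading from ``transport``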
+
+        :return: a tuple of (wrapped byte stream, bytes left in the read buffer)
+
+        """
+        await self._call_sslobject_method(self._ssl_object.unwrap)
+        self._read_bio.write_eof()
+        self._write_bio.write_eof()
+        return self.transport_stream, self._read_bio.read()
+
+    async def aclose(self) -> None:
+        if self.standard_compatible:
+            try:
+                await self.unwrap()
+            except BaseException:
+                await aclose_forcefully(self.transport_stream)
+                raise
+
+        await self.transport_stream.aclose()
+
+    async def receive(self, max_bytes: int = 65536) -> bytes:
+        data = await self._call_sslobject_method(self._ssl_object.read, max_bytes)
+        if not data:
+            raise EndOfStream
+
+        return data
+
+    async def send(self, item: bytes) -> None:
+        await self._call_sslobject_method(self._ssl_object.write, item)
+
+    async def send_eof(self) -> None:
+        tls_version = self.extra(TLSAttribute.tls_version)
+        match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version)
+        if match:
+            major, minor = int(match.group(1)), int(match.group(2) or 0)
+            if (major, minor) < (1, 3):
+                raise NotImplementedError(
+                    f"send_eof() requires at least TLSv1.3; current "
+                    f"session uses {tls_version}"
+                )
+
+        raise NotImplementedError(
+            "send_eof() has not yet been implemented for TLS streams"
+        )
+
+    @property
+    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+        return {
+            **self.transport_stream.extra_attributes,
+            TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol,
+            TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding,
+            TLSAttribute.cipher: self._ssl_object.cipher,
+            TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False),
+            TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(
+                True
+            ),
+            TLSAttribute.server_side: lambda: self._ssl_object.server_side,
+            TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers(),
+            TLSAttribute.standard_compatible: lambda: self.standard_compatible,
+            TLSAttribute.ssl_object: lambda: self._ssl_object,
+            TLSAttribute.tls_version: self._ssl_object.version,
+        }
+
+
+@dataclass(eq=False)
+class TLSListener(Listener[TLSStream]):
+    """
+    A convenience listener that wraps another listener and auto-negotiates a TLS session on every
+    accepted connection.
+
+    If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is
+    called to do whatever post-mortem processing is deemed necessary.
+
+    Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute.
+
+    :param Listener listener: the listener to wrap
+    :param ssl_context: the SSL context object
+    :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap`
+    :param handshake_timeout: time limit for the TLS handshake
+        (passed to :func:`~anyio.fail_after`)
+    """
+
+    listener: Listener[Any]
+    ssl_context: ssl.SSLContext
+    standard_compatible: bool = True
+    handshake_timeout: float = 30
+
+    @staticmethod
+    async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None:
+        """
+        Handle an exception raised during the TLS handshake.
+
+        This method does 3 things:
+
+        #. Forcefully closes the original stream
+        #. Logs the exception (unless it was a cancellation exception) using the
+           ``anyio.streams.tls`` logger
+        #. Reraises the exception if it was a base exception or a cancellation exception
+
+        :param exc: the exception
+        :param stream: the original stream
+
+        """
+        await aclose_forcefully(stream)
+
+        # Log all except cancellation exceptions
+        if not isinstance(exc, get_cancelled_exc_class()):
+            logging.getLogger(__name__).exception("Error during TLS handshake")
+
+        # Only reraise base exceptions and cancellation exceptions
+        if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()):
+            raise
+
+    async def serve(
+        self,
+        handler: Callable[[TLSStream], Any],
+        task_group: Optional[TaskGroup] = None,
+    ) -> None:
+        @wraps(handler)
+        async def handler_wrapper(stream: AnyByteStream) -> None:
+            from .. import fail_after
+
+            try:
+                with fail_after(self.handshake_timeout):
+                    wrapped_stream = await TLSStream.wrap(
+                        stream,
+                        ssl_context=self.ssl_context,
+                        standard_compatible=self.standard_compatible,
+                    )
+            except BaseException as exc:
+                await self.handle_handshake_error(exc, stream)
+            else:
+                await handler(wrapped_stream)
+
+        await self.listener.serve(handler_wrapper, task_group)
+
+    async def aclose(self) -> None:
+        await self.listener.aclose()
+
+    @property
+    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+        return {
+            TLSAttribute.standard_compatible: lambda: self.standard_compatible,
+        }
diff --git a/sbsheriff/Lib/site-packages/anyio/to_process.py b/sbsheriff/Lib/site-packages/anyio/to_process.py
new file mode 100644
index 0000000..39a3173
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/anyio/to_process.py
@@ -0,0 +1,247 @@
+import os
+import pickle
+import subprocess
+import sys
+from collections import deque
+from importlib.util import module_from_spec, spec_from_file_location
+from typing import Callable, Deque, List, Optional, Set, Tuple, TypeVar, cast
+
+from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class
+from ._core._exceptions import BrokenWorkerProcess
+from ._core._subprocesses import open_process
+from ._core._synchronization import CapacityLimiter
+from ._core._tasks import CancelScope, fail_after
+from .abc import ByteReceiveStream, ByteSendStream, Process
+from .lowlevel import RunVar, checkpoint_if_cancelled
+from .streams.buffered import BufferedByteReceiveStream
+
+WORKER_MAX_IDLE_TIME = 300  # 5 minutes
+
+T_Retval = TypeVar("T_Retval")
+_process_pool_workers: RunVar[Set[Process]] = RunVar("_process_pool_workers")
+_process_pool_idle_workers: RunVar[Deque[Tuple[Process, float]]] = RunVar(
+    "_process_pool_idle_workers"
+)
+_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter")
+
+
+async def run_sync(
+    func: Callable[..., T_Retval],
+    *args: object,
+    cancellable: bool = False,
+    limiter: Optional[CapacityLimiter] = None,
+) -> T_Retval:
+    """
+    Call the given function with the given arguments in a worker process.
+
+    If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
+    the worker process running it will be abruptly terminated using SIGKILL (or
+    ``TerminateProcess()`` on Windows).
+
+    :param func: a callable
+    :param args: positional arguments for the callable
+    :param cancellable: ``True`` to allow cancellation of the operation while it's running
+    :param limiter: capacity limiter to use to limit the total number of processes running
+        (if omitted, the default limiter is used)
+    :return: an awaitable that yields the return value of the function.
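+
+    A minimal usage sketch; ``square`` here is a hypothetical stand-in for any
+    picklable callable::
+
+        import anyio
+        import anyio.to_process
+
+        def square(n: int) -> int:
+            return n * n
+
+        async def main() -> None:
+            print(await anyio.to_process.run_sync(square, 7))
+
+        anyio.run(main)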
+ + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_asynclib().setup_process_pool_exit_at_shutdown(workers) + + async with (limiter or current_default_process_limiter()): + # Pop processes from the pool (starting from the most recently used) until we find one that + # hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or + # longer + now = current_time() + killed_processes: List[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process, idle_since = idle_workers.popleft() + process.kill() + workers.remove(process) + killed_processes.append(process) + + with CancelScope(shield=True): + for process in killed_processes: + await process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled = pickle.dumps( + ("init", sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def current_default_process_limiter() -> CapacityLimiter: + """ + Return the 
capacity limiter that is used by default to limit the number of worker processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: Optional[str] + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path: + # Load the parent's main module but as __mp_main__ instead of __main__ + # (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + except BaseException as exc: + exception = exc + + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/sbsheriff/Lib/site-packages/anyio/to_thread.py b/sbsheriff/Lib/site-packages/anyio/to_thread.py new file mode 100644 index 0000000..a2fd42f --- /dev/null +++ b/sbsheriff/Lib/site-packages/anyio/to_thread.py @@ -0,0 +1,65 @@ +from typing import Callable, Optional, TypeVar +from warnings import warn + +from ._core._eventloop import get_asynclib +from .abc import CapacityLimiter + +T_Retval = TypeVar("T_Retval") + + +async def run_sync( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None +) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the thread will still run its course but its return value (or any raised exception) will be + ignored. + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. 
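+
+    A minimal usage sketch; ``time.sleep`` stands in for any blocking call::
+
+        import time
+
+        import anyio
+        import anyio.to_thread
+
+        async def main() -> None:
+            await anyio.to_thread.run_sync(time.sleep, 1)
+
+        anyio.run(main)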
+ + """ + return await get_asynclib().run_sync_in_worker_thread( + func, *args, cancellable=cancellable, limiter=limiter + ) + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None +) -> T_Retval: + warn( + "run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead", + DeprecationWarning, + ) + return await run_sync(func, *args, cancellable=cancellable, limiter=limiter) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of concurrent threads. + + :return: a capacity limiter object + + """ + return get_asynclib().current_default_thread_limiter() + + +def current_default_worker_thread_limiter() -> CapacityLimiter: + warn( + "current_default_worker_thread_limiter() has been deprecated, " + "use anyio.to_thread.current_default_thread_limiter() instead", + DeprecationWarning, + ) + return current_default_thread_limiter() diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/AUTHORS b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/AUTHORS new file mode 100644 index 0000000..64bc938 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/AUTHORS @@ -0,0 +1,6 @@ +Main contributors +================= + +MagicStack Inc.: + Elvis Pranskevichus + Yury Selivanov diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/INSTALLER b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/LICENSE b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/LICENSE new file mode 100644 index 0000000..d931386 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/LICENSE @@ -0,0 +1,204 @@ +Copyright (C) 2016-present the asyncpg authors and contributors. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright (C) 2016-present the asyncpg authors and contributors + + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/METADATA b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/METADATA new file mode 100644 index 0000000..aaf9da1 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/METADATA @@ -0,0 +1,137 @@ +Metadata-Version: 2.1 +Name: asyncpg +Version: 0.27.0 +Summary: An asyncio PostgreSQL driver +Home-page: https://github.com/MagicStack/asyncpg +Author: MagicStack Inc +Author-email: hello@magic.io +License: Apache License, Version 2.0 +Platform: macOS +Platform: POSIX +Platform: Windows +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: AsyncIO +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=3.7.0 +License-File: LICENSE +License-File: AUTHORS +Requires-Dist: typing-extensions (>=3.7.4.3) ; python_version < "3.8" +Provides-Extra: dev +Requires-Dist: Cython (<0.30.0,>=0.29.24) ; extra == 'dev' +Requires-Dist: pytest (>=6.0) ; extra == 'dev' +Requires-Dist: Sphinx (~=4.1.2) ; extra == 'dev' +Requires-Dist: sphinxcontrib-asyncio (~=0.3.0) ; extra == 'dev' +Requires-Dist: sphinx-rtd-theme (~=0.5.2) ; extra == 'dev' +Requires-Dist: flake8 (~=5.0.4) ; extra == 'dev' +Requires-Dist: uvloop (>=0.15.3) ; (platform_system != "Windows") and extra == 'dev' +Provides-Extra: docs +Requires-Dist: Sphinx (~=4.1.2) ; extra == 'docs' +Requires-Dist: sphinxcontrib-asyncio (~=0.3.0) ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme (~=0.5.2) ; extra == 'docs' +Provides-Extra: test +Requires-Dist: flake8 (~=5.0.4) ; extra == 'test' +Requires-Dist: uvloop (>=0.15.3) ; (platform_system != "Windows") and extra == 'test' + +asyncpg -- A fast PostgreSQL Database Client Library for Python/asyncio +======================================================================= + +.. image:: https://github.com/MagicStack/asyncpg/workflows/Tests/badge.svg + :target: https://github.com/MagicStack/asyncpg/actions?query=workflow%3ATests+branch%3Amaster + :alt: GitHub Actions status +.. image:: https://img.shields.io/pypi/v/asyncpg.svg + :target: https://pypi.python.org/pypi/asyncpg + +**asyncpg** is a database interface library designed specifically for +PostgreSQL and Python/asyncio. asyncpg is an efficient, clean implementation +of PostgreSQL server binary protocol for use with Python's ``asyncio`` +framework. You can read more about asyncpg in an introductory +`blog post `_. + +asyncpg requires Python 3.7 or later and is supported for PostgreSQL +versions 9.5 to 15. Older PostgreSQL versions or other databases implementing +the PostgreSQL protocol *may* work, but are not being actively tested. + + +Documentation +------------- + +The project documentation can be found +`here `_. + + +Performance +----------- + +In our testing asyncpg is, on average, **3x** faster than psycopg2 +(and its asyncio variant -- aiopg). + +.. 
image:: https://raw.githubusercontent.com/MagicStack/asyncpg/master/performance.png + :target: https://gistpreview.github.io/?b8eac294ac85da177ff82f784ff2cb60 + +The above results are a geometric mean of benchmarks obtained with PostgreSQL +`client driver benchmarking toolbench `_ +in November 2020 (click on the chart to see full details). + + +Features +-------- + +asyncpg implements PostgreSQL server protocol natively and exposes its +features directly, as opposed to hiding them behind a generic facade +like DB-API. + +This enables asyncpg to have easy-to-use support for: + +* **prepared statements** +* **scrollable cursors** +* **partial iteration** on query results +* automatic encoding and decoding of composite types, arrays, + and any combination of those +* straightforward support for custom data types + + +Installation +------------ + +asyncpg is available on PyPI and has no dependencies. +Use pip to install:: + + $ pip install asyncpg + + +Basic Usage +----------- + +.. code-block:: python + + import asyncio + import asyncpg + + async def run(): + conn = await asyncpg.connect(user='user', password='password', + database='database', host='127.0.0.1') + values = await conn.fetch( + 'SELECT * FROM mytable WHERE id = $1', + 10, + ) + await conn.close() + + loop = asyncio.get_event_loop() + loop.run_until_complete(run()) + + +License +------- + +asyncpg is developed and distributed under the Apache 2.0 license. diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/RECORD b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/RECORD new file mode 100644 index 0000000..d837f99 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/RECORD @@ -0,0 +1,90 @@ +asyncpg-0.27.0.dist-info/AUTHORS,sha256=eFUq_BnBQPCWi3PJTDNl8YCbrcNLfMIiN-b3x_1wTYk,136 +asyncpg-0.27.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +asyncpg-0.27.0.dist-info/LICENSE,sha256=CqxXCYIpatnpe8MnCEeeQM97YNV4fIrBh9hsFYPpLsA,11670 +asyncpg-0.27.0.dist-info/METADATA,sha256=f645fuvGcY89Hm9otHYzMDxTVyaP6Ev1g4Ha2BP07x0,4732 +asyncpg-0.27.0.dist-info/RECORD,, +asyncpg-0.27.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asyncpg-0.27.0.dist-info/WHEEL,sha256=W26pYN7HLsBT1jrDSL9udgf_mdNKJmYmL23sIP-FcgM,102 +asyncpg-0.27.0.dist-info/top_level.txt,sha256=DdhVhpzCq49mykkHNag6i9zuJx05_tx4CMZymM1F8dU,8 +asyncpg/__init__.py,sha256=EbywzGkj2v63N1p2vEs-K2GAuRvZnCesEDNrnl3u8sQ,582 +asyncpg/__pycache__/__init__.cpython-310.pyc,, +asyncpg/__pycache__/_version.cpython-310.pyc,, +asyncpg/__pycache__/cluster.cpython-310.pyc,, +asyncpg/__pycache__/compat.cpython-310.pyc,, +asyncpg/__pycache__/connect_utils.cpython-310.pyc,, +asyncpg/__pycache__/connection.cpython-310.pyc,, +asyncpg/__pycache__/connresource.cpython-310.pyc,, +asyncpg/__pycache__/cursor.cpython-310.pyc,, +asyncpg/__pycache__/introspection.cpython-310.pyc,, +asyncpg/__pycache__/pool.cpython-310.pyc,, +asyncpg/__pycache__/prepared_stmt.cpython-310.pyc,, +asyncpg/__pycache__/serverversion.cpython-310.pyc,, +asyncpg/__pycache__/transaction.cpython-310.pyc,, +asyncpg/__pycache__/types.cpython-310.pyc,, +asyncpg/__pycache__/utils.cpython-310.pyc,, +asyncpg/_testbase/__init__.py,sha256=D51nPHrK13GmfmyaDMdPwYAvq0vokCIr1ohfv3pIZLk,13619 +asyncpg/_testbase/__pycache__/__init__.cpython-310.pyc,, +asyncpg/_testbase/__pycache__/fuzzer.cpython-310.pyc,, +asyncpg/_testbase/fuzzer.py,sha256=nYsnhLXQEquuJtWjWx3feHoj5_Ctu9bH6VCQ2ZsphFc,10110 +asyncpg/_version.py,sha256=j6RiiM3me5HZQfpLWyYLO-CJqP5vqgz67eZ3_ag-MV0,589 
+asyncpg/cluster.py,sha256=eHO2-n8j3KjyIMQRn4U1SJkkXyIXAItC1DBk1MH2uOw,23972 +asyncpg/compat.py,sha256=8VhzE3sUMSdNCsR8jYN2gdM2rsW5HqwtK7xT4w6zSN0,1731 +asyncpg/connect_utils.py,sha256=d0myrEK-S0wVXnzaPhSpoKAI7oWReguiXE4VRVg3kqI,31052 +asyncpg/connection.py,sha256=u43EPZBo1pfjOiZTNHQ0V9HLkUOIUj8hABIp2nL6sSY,88402 +asyncpg/connresource.py,sha256=T0vpfsZuS96AkltQfpynAgPLLheVDy_hSj0ejcZkbAQ,1428 +asyncpg/cursor.py,sha256=Ag1i8fiX2dHs3XcDSwI0pvXrRYiDoGlSJC7qLhMk6nc,8990 +asyncpg/exceptions/__init__.py,sha256=w23Y9DxdAv2Adymbffi8_XXtO0OecQmBznlFzuFm8Gs,29887 +asyncpg/exceptions/__pycache__/__init__.cpython-310.pyc,, +asyncpg/exceptions/__pycache__/_base.cpython-310.pyc,, +asyncpg/exceptions/_base.py,sha256=hr3q0Y7vO8cFDRYCldqRFNQwxZxiVbqK2Zv_Qh8JLoE,9043 +asyncpg/introspection.py,sha256=U6q7WIp3zQZ1YBjpwp9jDQ7gdNP8zmawey7Ixg6ZhCI,9166 +asyncpg/pgproto/__init__.pxd,sha256=d36iPoZbiEOs4JLdV2lApUgy664JJZVdAvZTznHs2Qc,218 +asyncpg/pgproto/__init__.py,sha256=d36iPoZbiEOs4JLdV2lApUgy664JJZVdAvZTznHs2Qc,218 +asyncpg/pgproto/__pycache__/__init__.cpython-310.pyc,, +asyncpg/pgproto/__pycache__/types.cpython-310.pyc,, +asyncpg/pgproto/buffer.pxd,sha256=meKBG5f1l2B5B71laYNnJxj7W2jO-S6lGzH0K6zIzXo,4518 +asyncpg/pgproto/buffer.pyx,sha256=0GRpd2zqAR3WcHOl8f2LtIGJrap_bFDiHvYZN6nWYGE,26127 +asyncpg/pgproto/consts.pxi,sha256=4znDppgjfgK1Jt-vrC-9ybK-wLhS2ZXHUu2iSanuAOI,387 +asyncpg/pgproto/cpythonx.pxd,sha256=Q0R3NqDfX3hMKYCv2EHRSAzBH2oQGQMCbJGivsPGHeQ,759 +asyncpg/pgproto/debug.pxd,sha256=8I8LhCtxx0ItNinKMUmC3TrwVpkuvYDDhP4OVse12ns,273 +asyncpg/pgproto/frb.pxd,sha256=EhIOM0QWWhDdLDDZEXXWbacd0VqEbgLj4DMvugGss5A,1289 +asyncpg/pgproto/frb.pyx,sha256=7ARAMMP0nMQpkLCpKfAIl57vswVYNkRRQ3vEZ_f9xBA,421 +asyncpg/pgproto/hton.pxd,sha256=rt7WLZasAJplt3o1MYsIQlfTZGbecpgdLoQxOcJD-_Q,977 +asyncpg/pgproto/pgproto.cp310-win_amd64.pyd,sha256=VZClLOijktndKRF8urVKqA5PS-D76jI7Mqzt3Fg4-3M,231424 +asyncpg/pgproto/pgproto.pxd,sha256=vfSBam43bOQKliL0fGdt8608D_pBXXb-idnSuT6orUY,449 +asyncpg/pgproto/pgproto.pyx,sha256=bjCy8iIZj7IFj0BRegv87xpoYQkzrFvPFEyowko8nlg,1298 +asyncpg/pgproto/tohex.pxd,sha256=KhhZRaf-jl0jzOk_-XSBrd0asszmVFZ1CHaeCVE-unU,371 +asyncpg/pgproto/types.py,sha256=QLr97YbxULqDA_v3idn3SKWmKZHyn4ea1zJzG9AHRcQ,13360 +asyncpg/pgproto/uuid.pyx,sha256=zhVNDQwWjfH2PNwg8Afe949fDT-wYMBfOXjf2AdJ9n4,9946 +asyncpg/pool.py,sha256=PlgKUeSN57Utr32lX7EsbyQ6gdXmA6F3m3iWOccxnu8,40184 +asyncpg/prepared_stmt.py,sha256=MkwHPt_uvNiLxpC3TJ7irGBaTcqRc7NdM3Mgme4JZv8,9251 +asyncpg/protocol/__init__.py,sha256=adVXDmUi0IYdAfRnqYXGP4o0Uyjv2DJfr8FrXRlT5eY,313 +asyncpg/protocol/__pycache__/__init__.cpython-310.pyc,, +asyncpg/protocol/codecs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asyncpg/protocol/codecs/__pycache__/__init__.cpython-310.pyc,, +asyncpg/protocol/codecs/array.pyx,sha256=OIkcbecdoh84ZPfT26nElg_yZp3EsCH8vkwuVMhycBg,30361 +asyncpg/protocol/codecs/base.pxd,sha256=BeFclKvmQxcsbp2knwPnGLPOyq6473kAAPUP_seW1FE,6291 +asyncpg/protocol/codecs/base.pyx,sha256=6VOdg3Qv3VAIT5Mtk5tB7rybUiZGm5x-hqggKceLIhw,33257 +asyncpg/protocol/codecs/pgproto.pyx,sha256=vKWJ76Ht6bERXi-8gZAjiKOkZ29wJFpDcVt6or9SQO8,17659 +asyncpg/protocol/codecs/range.pyx,sha256=q7AprJMdRbF5R0hNLXQz42y1CSYiL2z51DHmPCX95Eo,6566 +asyncpg/protocol/codecs/record.pyx,sha256=RzE8ypCFSPiH4pJdEX6178UEd5TStpG3lwd0SEILhfI,2433 +asyncpg/protocol/codecs/textutils.pyx,sha256=O3QiS3R1_IDLT5U29cq3Nb_fLUoP-evX4ff93dW8Wrk,2110 +asyncpg/protocol/consts.pxi,sha256=wyf2ctwkBArBS20kiKZ9MuwEdHIORbjaWUmLAGT5grg,393 
+asyncpg/protocol/coreproto.pxd,sha256=xxlUt-Y4g1rOFws4IlLZEg3KM1LYqs2BZM2bbB2eb3M,6269 +asyncpg/protocol/coreproto.pyx,sha256=jQnXoKCykvGxCzrB71ZVh2FpFR3v47AdED7CXthQoig,38618 +asyncpg/protocol/cpythonx.pxd,sha256=N0sNOn2zYq9LdVpCbk5l-UQEXZKJxlgTBEvuJRIia4E,632 +asyncpg/protocol/encodings.pyx,sha256=lq0YrGqKOS3S4bnfecDP4ReJwmpJFkoJDCWxdGa-QaM,1707 +asyncpg/protocol/pgtypes.pxi,sha256=WMJQUjwky0BRGoz1Uv-s55Sn9cn3fp_z-2s7ySErxCc,7190 +asyncpg/protocol/prepared_stmt.pxd,sha256=5mzS5HRtwWw7fY3s88LFNB9ETdUVK1gKyLTR0FiyLFo,1122 +asyncpg/protocol/prepared_stmt.pyx,sha256=vaGdq92u-Pt77wRF4NsYbUTKL3JNo6ZpRpm9ukONovw,13204 +asyncpg/protocol/protocol.cp310-win_amd64.pyd,sha256=8mGLyG66G4UMDLmF4VLqw_KFWh7JxPPH7f5CFHMd9ys,635392 +asyncpg/protocol/protocol.pxd,sha256=vuxzb36lNZA0bcxG-JBtXbv6wGklrJzkawA72UWqQSo,2096 +asyncpg/protocol/protocol.pyx,sha256=njfmfldJv95mBVTYv_YmS_bMYKDvZFPca7XwXqmGpMs,35000 +asyncpg/protocol/scram.pxd,sha256=WHz6b-CFCPI3rePwxK096bZQR0mvI1iYprQ-ZuhrWdk,1330 +asyncpg/protocol/scram.pyx,sha256=pF1Hgp_9cgSOEBmMZToNaNgdxqV6-2Ug-Am0as-yq3s,14933 +asyncpg/protocol/settings.pxd,sha256=k1guxt38rbdlz1_ArDgXNSLUl3IcCfOyXWj1I4RoARY,1085 +asyncpg/protocol/settings.pyx,sha256=hBECqeNNPzGGIiZOb-2qgV5QDyyotQoa2LEi4YGgKOg,3800 +asyncpg/serverversion.py,sha256=WpWvDakphms881RaToOyHKAVS6wxaJMQfzetXJJsLnM,1850 +asyncpg/transaction.py,sha256=sahDihA507PDgrdNQjjLgYTCGXQWUSe4hLqpPQ0I5mg,8535 +asyncpg/types.py,sha256=eVh6rdPWpcMWauLRMRUEuWniEVAMAbaniDucwcBwQkk,4830 +asyncpg/utils.py,sha256=38wIII-s9AOC-2cPFz6rX0hnEHNtSMSNdih-Mti2cyc,1412 diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/REQUESTED b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/WHEEL b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/WHEEL new file mode 100644 index 0000000..93f1ef4 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp310-cp310-win_amd64 + diff --git a/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/top_level.txt b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/top_level.txt new file mode 100644 index 0000000..5789edd --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg-0.27.0.dist-info/top_level.txt @@ -0,0 +1 @@ +asyncpg diff --git a/sbsheriff/Lib/site-packages/asyncpg/__init__.py b/sbsheriff/Lib/site-packages/asyncpg/__init__.py new file mode 100644 index 0000000..e8cd11e --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/__init__.py @@ -0,0 +1,19 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from .connection import connect, Connection # NOQA +from .exceptions import * # NOQA +from .pool import create_pool, Pool # NOQA +from .protocol import Record # NOQA +from .types import * # NOQA + + +from ._version import __version__ # NOQA + + +__all__ = ('connect', 'create_pool', 'Pool', 'Record', 'Connection') +__all__ += exceptions.__all__ # NOQA diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..302f09a Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/__init__.cpython-310.pyc differ diff --git 
a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/_version.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000..a5921e2 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/_version.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/cluster.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/cluster.cpython-310.pyc new file mode 100644 index 0000000..b8e4404 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/cluster.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/compat.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000..b9e6b5e Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/compat.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connect_utils.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connect_utils.cpython-310.pyc new file mode 100644 index 0000000..8e2e4e9 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connect_utils.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connection.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connection.cpython-310.pyc new file mode 100644 index 0000000..d4df4e2 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connection.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connresource.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connresource.cpython-310.pyc new file mode 100644 index 0000000..c0f9c31 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/connresource.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/cursor.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/cursor.cpython-310.pyc new file mode 100644 index 0000000..2824db1 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/cursor.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/introspection.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/introspection.cpython-310.pyc new file mode 100644 index 0000000..b71b824 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/introspection.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/pool.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/pool.cpython-310.pyc new file mode 100644 index 0000000..d5ee5cb Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/pool.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/prepared_stmt.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/prepared_stmt.cpython-310.pyc new file mode 100644 index 0000000..bdc6c11 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/prepared_stmt.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/serverversion.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/serverversion.cpython-310.pyc new file mode 100644 index 0000000..90170bc Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/serverversion.cpython-310.pyc differ diff --git 
a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/transaction.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/transaction.cpython-310.pyc new file mode 100644 index 0000000..9329f7f Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/transaction.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/types.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/types.cpython-310.pyc new file mode 100644 index 0000000..c21fdbd Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/types.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/__pycache__/utils.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..4f39598 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/__pycache__/utils.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/_testbase/__init__.py b/sbsheriff/Lib/site-packages/asyncpg/_testbase/__init__.py new file mode 100644 index 0000000..9944b20 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/_testbase/__init__.py @@ -0,0 +1,437 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncio +import atexit +import contextlib +import functools +import inspect +import logging +import os +import re +import textwrap +import time +import traceback +import unittest + + +import asyncpg +from asyncpg import cluster as pg_cluster +from asyncpg import connection as pg_connection +from asyncpg import pool as pg_pool + +from . import fuzzer + + +@contextlib.contextmanager +def silence_asyncio_long_exec_warning(): + def flt(log_record): + msg = log_record.getMessage() + return not msg.startswith('Executing ') + + logger = logging.getLogger('asyncio') + logger.addFilter(flt) + try: + yield + finally: + logger.removeFilter(flt) + + +def with_timeout(timeout): + def wrap(func): + func.__timeout__ = timeout + return func + + return wrap + + +class TestCaseMeta(type(unittest.TestCase)): + TEST_TIMEOUT = None + + @staticmethod + def _iter_methods(bases, ns): + for base in bases: + for methname in dir(base): + if not methname.startswith('test_'): + continue + + meth = getattr(base, methname) + if not inspect.iscoroutinefunction(meth): + continue + + yield methname, meth + + for methname, meth in ns.items(): + if not methname.startswith('test_'): + continue + + if not inspect.iscoroutinefunction(meth): + continue + + yield methname, meth + + def __new__(mcls, name, bases, ns): + for methname, meth in mcls._iter_methods(bases, ns): + @functools.wraps(meth) + def wrapper(self, *args, __meth__=meth, **kwargs): + coro = __meth__(self, *args, **kwargs) + timeout = getattr(__meth__, '__timeout__', mcls.TEST_TIMEOUT) + if timeout: + coro = asyncio.wait_for(coro, timeout) + try: + self.loop.run_until_complete(coro) + except asyncio.TimeoutError: + raise self.failureException( + 'test timed out after {} seconds'.format( + timeout)) from None + else: + self.loop.run_until_complete(coro) + ns[methname] = wrapper + + return super().__new__(mcls, name, bases, ns) + + +class TestCase(unittest.TestCase, metaclass=TestCaseMeta): + + @classmethod + def setUpClass(cls): + if os.environ.get('USE_UVLOOP'): + import uvloop + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + loop = asyncio.new_event_loop() + 
asyncio.set_event_loop(None) + cls.loop = loop + + @classmethod + def tearDownClass(cls): + cls.loop.close() + asyncio.set_event_loop(None) + + def setUp(self): + self.loop.set_exception_handler(self.loop_exception_handler) + self.__unhandled_exceptions = [] + + def tearDown(self): + if self.__unhandled_exceptions: + formatted = [] + + for i, context in enumerate(self.__unhandled_exceptions): + formatted.append(self._format_loop_exception(context, i + 1)) + + self.fail( + 'unexpected exceptions in asynchronous code:\n' + + '\n'.join(formatted)) + + @contextlib.contextmanager + def assertRunUnder(self, delta): + st = time.monotonic() + try: + yield + finally: + elapsed = time.monotonic() - st + if elapsed > delta: + raise AssertionError( + 'running block took {:0.3f}s which is longer ' + 'than the expected maximum of {:0.3f}s'.format( + elapsed, delta)) + + @contextlib.contextmanager + def assertLoopErrorHandlerCalled(self, msg_re: str): + contexts = [] + + def handler(loop, ctx): + contexts.append(ctx) + + old_handler = self.loop.get_exception_handler() + self.loop.set_exception_handler(handler) + try: + yield + + for ctx in contexts: + msg = ctx.get('message') + if msg and re.search(msg_re, msg): + return + + raise AssertionError( + 'no message matching {!r} was logged with ' + 'loop.call_exception_handler()'.format(msg_re)) + + finally: + self.loop.set_exception_handler(old_handler) + + def loop_exception_handler(self, loop, context): + self.__unhandled_exceptions.append(context) + loop.default_exception_handler(context) + + def _format_loop_exception(self, context, n): + message = context.get('message', 'Unhandled exception in event loop') + exception = context.get('exception') + if exception is not None: + exc_info = (type(exception), exception, exception.__traceback__) + else: + exc_info = None + + lines = [] + for key in sorted(context): + if key in {'message', 'exception'}: + continue + value = context[key] + if key == 'source_traceback': + tb = ''.join(traceback.format_list(value)) + value = 'Object created at (most recent call last):\n' + value += tb.rstrip() + else: + try: + value = repr(value) + except Exception as ex: + value = ('Exception in __repr__ {!r}; ' + 'value type: {!r}'.format(ex, type(value))) + lines.append('[{}]: {}\n\n'.format(key, value)) + + if exc_info is not None: + lines.append('[exception]:\n') + formatted_exc = textwrap.indent( + ''.join(traceback.format_exception(*exc_info)), ' ') + lines.append(formatted_exc) + + details = textwrap.indent(''.join(lines), ' ') + return '{:02d}. {}:\n{}\n'.format(n, message, details) + + +_default_cluster = None + + +def _init_cluster(ClusterCls, cluster_kwargs, initdb_options=None): + cluster = ClusterCls(**cluster_kwargs) + cluster.init(**(initdb_options or {})) + cluster.trust_local_connections() + atexit.register(_shutdown_cluster, cluster) + return cluster + + +def _start_cluster(ClusterCls, cluster_kwargs, server_settings, + initdb_options=None): + cluster = _init_cluster(ClusterCls, cluster_kwargs, initdb_options) + cluster.start(port='dynamic', server_settings=server_settings) + return cluster + + +def _get_initdb_options(initdb_options=None): + if not initdb_options: + initdb_options = {} + else: + initdb_options = dict(initdb_options) + + # Make the default superuser name stable. 
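The metaclass machinery above is what lets this test suite write plain `async def test_*` methods: `TestCaseMeta` rewraps every coroutine test so it runs under `loop.run_until_complete`, bounded by `asyncio.wait_for` whenever a timeout is set. A minimal usage sketch, with a hypothetical test class that is not part of asyncpg itself:

    import asyncio
    from asyncpg import _testbase as tb

    class TimeoutDemoTests(tb.TestCase):
        @tb.with_timeout(5.0)            # per-test override of TEST_TIMEOUT
        async def test_quick(self):
            await asyncio.sleep(0.01)    # driven via wait_for(coro, 5.0)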
+ if 'username' not in initdb_options: + initdb_options['username'] = 'postgres' + + return initdb_options + + +def _init_default_cluster(initdb_options=None): + global _default_cluster + + if _default_cluster is None: + pg_host = os.environ.get('PGHOST') + if pg_host: + # Using existing cluster, assuming it is initialized and running + _default_cluster = pg_cluster.RunningCluster() + else: + _default_cluster = _init_cluster( + pg_cluster.TempCluster, cluster_kwargs={}, + initdb_options=_get_initdb_options(initdb_options)) + + return _default_cluster + + +def _shutdown_cluster(cluster): + if cluster.get_status() == 'running': + cluster.stop() + if cluster.get_status() != 'not-initialized': + cluster.destroy() + + +def create_pool(dsn=None, *, + min_size=10, + max_size=10, + max_queries=50000, + max_inactive_connection_lifetime=60.0, + setup=None, + init=None, + loop=None, + pool_class=pg_pool.Pool, + connection_class=pg_connection.Connection, + record_class=asyncpg.Record, + **connect_kwargs): + return pool_class( + dsn, + min_size=min_size, max_size=max_size, + max_queries=max_queries, loop=loop, setup=setup, init=init, + max_inactive_connection_lifetime=max_inactive_connection_lifetime, + connection_class=connection_class, + record_class=record_class, + **connect_kwargs) + + +class ClusterTestCase(TestCase): + @classmethod + def get_server_settings(cls): + settings = { + 'log_connections': 'on' + } + + if cls.cluster.get_pg_version() >= (11, 0): + # JITting messes up timing tests, and + # is not essential for testing. + settings['jit'] = 'off' + + return settings + + @classmethod + def new_cluster(cls, ClusterCls, *, cluster_kwargs={}, initdb_options={}): + cluster = _init_cluster(ClusterCls, cluster_kwargs, + _get_initdb_options(initdb_options)) + cls._clusters.append(cluster) + return cluster + + @classmethod + def start_cluster(cls, cluster, *, server_settings={}): + cluster.start(port='dynamic', server_settings=server_settings) + + @classmethod + def setup_cluster(cls): + cls.cluster = _init_default_cluster() + + if cls.cluster.get_status() != 'running': + cls.cluster.start( + port='dynamic', server_settings=cls.get_server_settings()) + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls._clusters = [] + cls.setup_cluster() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + for cluster in cls._clusters: + if cluster is not _default_cluster: + cluster.stop() + cluster.destroy() + cls._clusters = [] + + @classmethod + def get_connection_spec(cls, kwargs={}): + conn_spec = cls.cluster.get_connection_spec() + if kwargs.get('dsn'): + conn_spec.pop('host') + conn_spec.update(kwargs) + if not os.environ.get('PGHOST') and not kwargs.get('dsn'): + if 'database' not in conn_spec: + conn_spec['database'] = 'postgres' + if 'user' not in conn_spec: + conn_spec['user'] = 'postgres' + return conn_spec + + @classmethod + def connect(cls, **kwargs): + conn_spec = cls.get_connection_spec(kwargs) + return pg_connection.connect(**conn_spec, loop=cls.loop) + + def setUp(self): + super().setUp() + self._pools = [] + + def tearDown(self): + super().tearDown() + for pool in self._pools: + pool.terminate() + self._pools = [] + + def create_pool(self, pool_class=pg_pool.Pool, + connection_class=pg_connection.Connection, **kwargs): + conn_spec = self.get_connection_spec(kwargs) + pool = create_pool(loop=self.loop, pool_class=pool_class, + connection_class=connection_class, **conn_spec) + self._pools.append(pool) + return pool + + +class 
ProxiedClusterTestCase(ClusterTestCase): + @classmethod + def get_server_settings(cls): + settings = dict(super().get_server_settings()) + settings['listen_addresses'] = '127.0.0.1' + return settings + + @classmethod + def get_proxy_settings(cls): + return {'fuzzing-mode': None} + + @classmethod + def setUpClass(cls): + super().setUpClass() + conn_spec = cls.cluster.get_connection_spec() + host = conn_spec.get('host') + if not host: + host = '127.0.0.1' + elif host.startswith('/'): + host = '127.0.0.1' + cls.proxy = fuzzer.TCPFuzzingProxy( + backend_host=host, + backend_port=conn_spec['port'], + ) + cls.proxy.start() + + @classmethod + def tearDownClass(cls): + cls.proxy.stop() + super().tearDownClass() + + @classmethod + def get_connection_spec(cls, kwargs): + conn_spec = super().get_connection_spec(kwargs) + conn_spec['host'] = cls.proxy.listening_addr + conn_spec['port'] = cls.proxy.listening_port + return conn_spec + + def tearDown(self): + self.proxy.reset() + super().tearDown() + + +def with_connection_options(**options): + if not options: + raise ValueError('no connection options were specified') + + def wrap(func): + func.__connect_options__ = options + return func + + return wrap + + +class ConnectedTestCase(ClusterTestCase): + + def setUp(self): + super().setUp() + + # Extract options set up with `with_connection_options`. + test_func = getattr(self, self._testMethodName).__func__ + opts = getattr(test_func, '__connect_options__', {}) + self.con = self.loop.run_until_complete(self.connect(**opts)) + self.server_version = self.con.get_server_version() + + def tearDown(self): + try: + self.loop.run_until_complete(self.con.close()) + self.con = None + finally: + super().tearDown() diff --git a/sbsheriff/Lib/site-packages/asyncpg/_testbase/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/_testbase/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..c5a08e0 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/_testbase/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/_testbase/__pycache__/fuzzer.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/_testbase/__pycache__/fuzzer.cpython-310.pyc new file mode 100644 index 0000000..cf6d772 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/_testbase/__pycache__/fuzzer.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/_testbase/fuzzer.py b/sbsheriff/Lib/site-packages/asyncpg/_testbase/fuzzer.py new file mode 100644 index 0000000..8874564 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/_testbase/fuzzer.py @@ -0,0 +1,306 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncio +import socket +import threading +import typing + +from asyncpg import cluster + + +class StopServer(Exception): + pass + + +class TCPFuzzingProxy: + def __init__(self, *, listening_addr: str='127.0.0.1', + listening_port: typing.Optional[int]=None, + backend_host: str, backend_port: int, + settings: typing.Optional[dict]=None) -> None: + self.listening_addr = listening_addr + self.listening_port = listening_port + self.backend_host = backend_host + self.backend_port = backend_port + self.settings = settings or {} + self.loop = None + self.connectivity = None + self.connectivity_loss = None + self.stop_event = None + self.connections = {} + self.sock = None + 
self.listen_task = None + + async def _wait(self, work): + work_task = asyncio.ensure_future(work) + stop_event_task = asyncio.ensure_future(self.stop_event.wait()) + + try: + await asyncio.wait( + [work_task, stop_event_task], + return_when=asyncio.FIRST_COMPLETED) + + if self.stop_event.is_set(): + raise StopServer() + else: + return work_task.result() + finally: + if not work_task.done(): + work_task.cancel() + if not stop_event_task.done(): + stop_event_task.cancel() + + def start(self): + started = threading.Event() + self.thread = threading.Thread( + target=self._start_thread, args=(started,)) + self.thread.start() + if not started.wait(timeout=2): + raise RuntimeError('fuzzer proxy failed to start') + + def stop(self): + self.loop.call_soon_threadsafe(self._stop) + self.thread.join() + + def _stop(self): + self.stop_event.set() + + def _start_thread(self, started_event): + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + self.connectivity = asyncio.Event() + self.connectivity.set() + self.connectivity_loss = asyncio.Event() + self.stop_event = asyncio.Event() + + if self.listening_port is None: + self.listening_port = cluster.find_available_port() + + self.sock = socket.socket() + self.sock.bind((self.listening_addr, self.listening_port)) + self.sock.listen(50) + self.sock.setblocking(False) + + try: + self.loop.run_until_complete(self._main(started_event)) + finally: + self.loop.close() + + async def _main(self, started_event): + self.listen_task = asyncio.ensure_future(self.listen()) + # Notify the main thread that we are ready to go. + started_event.set() + try: + await self.listen_task + finally: + for c in list(self.connections): + c.close() + await asyncio.sleep(0.01) + if hasattr(self.loop, 'remove_reader'): + self.loop.remove_reader(self.sock.fileno()) + self.sock.close() + + async def listen(self): + while True: + try: + client_sock, _ = await self._wait( + self.loop.sock_accept(self.sock)) + + backend_sock = socket.socket() + backend_sock.setblocking(False) + + await self._wait(self.loop.sock_connect( + backend_sock, (self.backend_host, self.backend_port))) + except StopServer: + break + + conn = Connection(client_sock, backend_sock, self) + conn_task = self.loop.create_task(conn.handle()) + self.connections[conn] = conn_task + + def trigger_connectivity_loss(self): + self.loop.call_soon_threadsafe(self._trigger_connectivity_loss) + + def _trigger_connectivity_loss(self): + self.connectivity.clear() + self.connectivity_loss.set() + + def restore_connectivity(self): + self.loop.call_soon_threadsafe(self._restore_connectivity) + + def _restore_connectivity(self): + self.connectivity.set() + self.connectivity_loss.clear() + + def reset(self): + self.restore_connectivity() + + def _close_connection(self, connection): + conn_task = self.connections.pop(connection, None) + if conn_task is not None: + conn_task.cancel() + + def close_all_connections(self): + for conn in list(self.connections): + self.loop.call_soon_threadsafe(self._close_connection, conn) + + +class Connection: + def __init__(self, client_sock, backend_sock, proxy): + self.client_sock = client_sock + self.backend_sock = backend_sock + self.proxy = proxy + self.loop = proxy.loop + self.connectivity = proxy.connectivity + self.connectivity_loss = proxy.connectivity_loss + self.proxy_to_backend_task = None + self.proxy_from_backend_task = None + self.is_closed = False + + def close(self): + if self.is_closed: + return + + self.is_closed = True + + if self.proxy_to_backend_task is not 
None: + self.proxy_to_backend_task.cancel() + self.proxy_to_backend_task = None + + if self.proxy_from_backend_task is not None: + self.proxy_from_backend_task.cancel() + self.proxy_from_backend_task = None + + self.proxy._close_connection(self) + + async def handle(self): + self.proxy_to_backend_task = asyncio.ensure_future( + self.proxy_to_backend()) + + self.proxy_from_backend_task = asyncio.ensure_future( + self.proxy_from_backend()) + + try: + await asyncio.wait( + [self.proxy_to_backend_task, self.proxy_from_backend_task], + return_when=asyncio.FIRST_COMPLETED) + + finally: + if self.proxy_to_backend_task is not None: + self.proxy_to_backend_task.cancel() + + if self.proxy_from_backend_task is not None: + self.proxy_from_backend_task.cancel() + + # Asyncio fails to properly remove the readers and writers + # when the task doing recv() or send() is cancelled, so + # we must remove the readers and writers manually before + # closing the sockets. + self.loop.remove_reader(self.client_sock.fileno()) + self.loop.remove_writer(self.client_sock.fileno()) + self.loop.remove_reader(self.backend_sock.fileno()) + self.loop.remove_writer(self.backend_sock.fileno()) + + self.client_sock.close() + self.backend_sock.close() + + async def _read(self, sock, n): + read_task = asyncio.ensure_future( + self.loop.sock_recv(sock, n)) + conn_event_task = asyncio.ensure_future( + self.connectivity_loss.wait()) + + try: + await asyncio.wait( + [read_task, conn_event_task], + return_when=asyncio.FIRST_COMPLETED) + + if self.connectivity_loss.is_set(): + return None + else: + return read_task.result() + finally: + if not self.loop.is_closed(): + if not read_task.done(): + read_task.cancel() + if not conn_event_task.done(): + conn_event_task.cancel() + + async def _write(self, sock, data): + write_task = asyncio.ensure_future( + self.loop.sock_sendall(sock, data)) + conn_event_task = asyncio.ensure_future( + self.connectivity_loss.wait()) + + try: + await asyncio.wait( + [write_task, conn_event_task], + return_when=asyncio.FIRST_COMPLETED) + + if self.connectivity_loss.is_set(): + return None + else: + return write_task.result() + finally: + if not self.loop.is_closed(): + if not write_task.done(): + write_task.cancel() + if not conn_event_task.done(): + conn_event_task.cancel() + + async def proxy_to_backend(self): + buf = None + + try: + while True: + await self.connectivity.wait() + if buf is not None: + data = buf + buf = None + else: + data = await self._read(self.client_sock, 4096) + if data == b'': + break + if self.connectivity_loss.is_set(): + if data: + buf = data + continue + await self._write(self.backend_sock, data) + + except ConnectionError: + pass + + finally: + if not self.loop.is_closed(): + self.loop.call_soon(self.close) + + async def proxy_from_backend(self): + buf = None + + try: + while True: + await self.connectivity.wait() + if buf is not None: + data = buf + buf = None + else: + data = await self._read(self.backend_sock, 4096) + if data == b'': + break + if self.connectivity_loss.is_set(): + if data: + buf = data + continue + await self._write(self.client_sock, data) + + except ConnectionError: + pass + + finally: + if not self.loop.is_closed(): + self.loop.call_soon(self.close) diff --git a/sbsheriff/Lib/site-packages/asyncpg/_version.py b/sbsheriff/Lib/site-packages/asyncpg/_version.py new file mode 100644 index 0000000..263da2e --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/_version.py @@ -0,0 +1,13 @@ +# This file MUST NOT contain anything but the __version__ assignment. 
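`TCPFuzzingProxy` above runs a private event loop in a background thread and shuttles bytes between client and backend; while connectivity is "lost", in-flight reads and writes return early and the pending data is parked in a buffer rather than dropped, so traffic resumes intact on restore. A sketch of how a test would typically drive it (addresses are illustrative):

    from asyncpg._testbase.fuzzer import TCPFuzzingProxy

    proxy = TCPFuzzingProxy(backend_host='127.0.0.1', backend_port=5432)
    proxy.start()                           # spawns the proxy thread
    try:
        # connect clients to proxy.listening_addr / proxy.listening_port,
        # then simulate a network partition:
        proxy.trigger_connectivity_loss()
        proxy.restore_connectivity()        # buffered bytes are flushed
    finally:
        proxy.stop()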
+# +# When making a release, change the value of __version__ +# to an appropriate value, and open a pull request against +# the correct branch (master if making a new feature release). +# The commit message MUST contain a properly formatted release +# log, and the commit must be signed. +# +# The release automation will: build and test the packages for the +# supported platforms, publish the packages on PyPI, merge the PR +# to the target branch, create a Git tag pointing to the commit. + +__version__ = '0.27.0' diff --git a/sbsheriff/Lib/site-packages/asyncpg/cluster.py b/sbsheriff/Lib/site-packages/asyncpg/cluster.py new file mode 100644 index 0000000..0999e41 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/cluster.py @@ -0,0 +1,688 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncio +import os +import os.path +import platform +import re +import shutil +import socket +import subprocess +import sys +import tempfile +import textwrap +import time + +import asyncpg +from asyncpg import serverversion + + +_system = platform.uname().system + +if _system == 'Windows': + def platform_exe(name): + if name.endswith('.exe'): + return name + return name + '.exe' +else: + def platform_exe(name): + return name + + +def find_available_port(): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + sock.bind(('127.0.0.1', 0)) + return sock.getsockname()[1] + except Exception: + return None + finally: + sock.close() + + +class ClusterError(Exception): + pass + + +class Cluster: + def __init__(self, data_dir, *, pg_config_path=None): + self._data_dir = data_dir + self._pg_config_path = pg_config_path + self._pg_bin_dir = ( + os.environ.get('PGINSTALLATION') + or os.environ.get('PGBIN') + ) + self._pg_ctl = None + self._daemon_pid = None + self._daemon_process = None + self._connection_addr = None + self._connection_spec_override = None + + def get_pg_version(self): + return self._pg_version + + def is_managed(self): + return True + + def get_data_dir(self): + return self._data_dir + + def get_status(self): + if self._pg_ctl is None: + self._init_env() + + process = subprocess.run( + [self._pg_ctl, 'status', '-D', self._data_dir], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = process.stdout, process.stderr + + if (process.returncode == 4 or not os.path.exists(self._data_dir) or + not os.listdir(self._data_dir)): + return 'not-initialized' + elif process.returncode == 3: + return 'stopped' + elif process.returncode == 0: + r = re.match(r'.*PID\s?:\s+(\d+).*', stdout.decode()) + if not r: + raise ClusterError( + 'could not parse pg_ctl status output: {}'.format( + stdout.decode())) + self._daemon_pid = int(r.group(1)) + return self._test_connection(timeout=0) + else: + raise ClusterError( + 'pg_ctl status exited with status {:d}: {}'.format( + process.returncode, stderr)) + + async def connect(self, loop=None, **kwargs): + conn_info = self.get_connection_spec() + conn_info.update(kwargs) + return await asyncpg.connect(loop=loop, **conn_info) + + def init(self, **settings): + """Initialize cluster.""" + if self.get_status() != 'not-initialized': + raise ClusterError( + 'cluster in {!r} has already been initialized'.format( + self._data_dir)) + + settings = dict(settings) + if 'encoding' not in settings: + settings['encoding'] = 'UTF-8' + + if settings: + settings_args = ['--{}={}'.format(k, v) + for 
k, v in settings.items()] + extra_args = ['-o'] + [' '.join(settings_args)] + else: + extra_args = [] + + process = subprocess.run( + [self._pg_ctl, 'init', '-D', self._data_dir] + extra_args, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + + output = process.stdout + + if process.returncode != 0: + raise ClusterError( + 'pg_ctl init exited with status {:d}:\n{}'.format( + process.returncode, output.decode())) + + return output.decode() + + def start(self, wait=60, *, server_settings={}, **opts): + """Start the cluster.""" + status = self.get_status() + if status == 'running': + return + elif status == 'not-initialized': + raise ClusterError( + 'cluster in {!r} has not been initialized'.format( + self._data_dir)) + + port = opts.pop('port', None) + if port == 'dynamic': + port = find_available_port() + + extra_args = ['--{}={}'.format(k, v) for k, v in opts.items()] + extra_args.append('--port={}'.format(port)) + + sockdir = server_settings.get('unix_socket_directories') + if sockdir is None: + sockdir = server_settings.get('unix_socket_directory') + if sockdir is None and _system != 'Windows': + sockdir = tempfile.gettempdir() + + ssl_key = server_settings.get('ssl_key_file') + if ssl_key: + # Make sure server certificate key file has correct permissions. + keyfile = os.path.join(self._data_dir, 'srvkey.pem') + shutil.copy(ssl_key, keyfile) + os.chmod(keyfile, 0o600) + server_settings = server_settings.copy() + server_settings['ssl_key_file'] = keyfile + + if sockdir is not None: + if self._pg_version < (9, 3): + sockdir_opt = 'unix_socket_directory' + else: + sockdir_opt = 'unix_socket_directories' + + server_settings[sockdir_opt] = sockdir + + for k, v in server_settings.items(): + extra_args.extend(['-c', '{}={}'.format(k, v)]) + + if _system == 'Windows': + # On Windows we have to use pg_ctl as direct execution + # of postgres daemon under an Administrative account + # is not permitted and there is no easy way to drop + # privileges. 
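`Cluster.init()`, `start()` and `stop()` are thin wrappers over `pg_ctl` and the `postgres` daemon: server settings become `-c key=value` arguments, and `port='dynamic'` resolves through `find_available_port()`. A minimal lifecycle sketch using the `TempCluster` variant defined later in this file, assuming PostgreSQL binaries are reachable via PGINSTALLATION, PGBIN or `pg_config`:

    from asyncpg.cluster import TempCluster

    cluster = TempCluster()                  # tempfile.mkdtemp() data dir
    cluster.init(encoding='UTF-8')           # pg_ctl init -D <data_dir>
    cluster.start(port='dynamic',
                  server_settings={'log_connections': 'on'})
    print(cluster.get_connection_spec())     # {'host': ..., 'port': ...}
    cluster.stop()
    cluster.destroy()                        # only legal once stopped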
+ if os.getenv('ASYNCPG_DEBUG_SERVER'): + stdout = sys.stdout + print( + 'asyncpg.cluster: Running', + ' '.join([ + self._pg_ctl, 'start', '-D', self._data_dir, + '-o', ' '.join(extra_args) + ]), + file=sys.stderr, + ) + else: + stdout = subprocess.DEVNULL + + process = subprocess.run( + [self._pg_ctl, 'start', '-D', self._data_dir, + '-o', ' '.join(extra_args)], + stdout=stdout, stderr=subprocess.STDOUT) + + if process.returncode != 0: + if process.stderr: + stderr = ':\n{}'.format(process.stderr.decode()) + else: + stderr = '' + raise ClusterError( + 'pg_ctl start exited with status {:d}{}'.format( + process.returncode, stderr)) + else: + if os.getenv('ASYNCPG_DEBUG_SERVER'): + stdout = sys.stdout + else: + stdout = subprocess.DEVNULL + + self._daemon_process = \ + subprocess.Popen( + [self._postgres, '-D', self._data_dir, *extra_args], + stdout=stdout, stderr=subprocess.STDOUT) + + self._daemon_pid = self._daemon_process.pid + + self._test_connection(timeout=wait) + + def reload(self): + """Reload server configuration.""" + status = self.get_status() + if status != 'running': + raise ClusterError('cannot reload: cluster is not running') + + process = subprocess.run( + [self._pg_ctl, 'reload', '-D', self._data_dir], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + stderr = process.stderr + + if process.returncode != 0: + raise ClusterError( + 'pg_ctl stop exited with status {:d}: {}'.format( + process.returncode, stderr.decode())) + + def stop(self, wait=60): + process = subprocess.run( + [self._pg_ctl, 'stop', '-D', self._data_dir, '-t', str(wait), + '-m', 'fast'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + stderr = process.stderr + + if process.returncode != 0: + raise ClusterError( + 'pg_ctl stop exited with status {:d}: {}'.format( + process.returncode, stderr.decode())) + + if (self._daemon_process is not None and + self._daemon_process.returncode is None): + self._daemon_process.kill() + + def destroy(self): + status = self.get_status() + if status == 'stopped' or status == 'not-initialized': + shutil.rmtree(self._data_dir) + else: + raise ClusterError('cannot destroy {} cluster'.format(status)) + + def _get_connection_spec(self): + if self._connection_addr is None: + self._connection_addr = self._connection_addr_from_pidfile() + + if self._connection_addr is not None: + if self._connection_spec_override: + args = self._connection_addr.copy() + args.update(self._connection_spec_override) + return args + else: + return self._connection_addr + + def get_connection_spec(self): + status = self.get_status() + if status != 'running': + raise ClusterError('cluster is not running') + + return self._get_connection_spec() + + def override_connection_spec(self, **kwargs): + self._connection_spec_override = kwargs + + def reset_wal(self, *, oid=None, xid=None): + status = self.get_status() + if status == 'not-initialized': + raise ClusterError( + 'cannot modify WAL status: cluster is not initialized') + + if status == 'running': + raise ClusterError( + 'cannot modify WAL status: cluster is running') + + opts = [] + if oid is not None: + opts.extend(['-o', str(oid)]) + if xid is not None: + opts.extend(['-x', str(xid)]) + if not opts: + return + + opts.append(self._data_dir) + + try: + reset_wal = self._find_pg_binary('pg_resetwal') + except ClusterError: + reset_wal = self._find_pg_binary('pg_resetxlog') + + process = subprocess.run( + [reset_wal] + opts, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + stderr = process.stderr + + if process.returncode != 0: + raise 
ClusterError( + 'pg_resetwal exited with status {:d}: {}'.format( + process.returncode, stderr.decode())) + + def reset_hba(self): + """Remove all records from pg_hba.conf.""" + status = self.get_status() + if status == 'not-initialized': + raise ClusterError( + 'cannot modify HBA records: cluster is not initialized') + + pg_hba = os.path.join(self._data_dir, 'pg_hba.conf') + + try: + with open(pg_hba, 'w'): + pass + except IOError as e: + raise ClusterError( + 'cannot modify HBA records: {}'.format(e)) from e + + def add_hba_entry(self, *, type='host', database, user, address=None, + auth_method, auth_options=None): + """Add a record to pg_hba.conf.""" + status = self.get_status() + if status == 'not-initialized': + raise ClusterError( + 'cannot modify HBA records: cluster is not initialized') + + if type not in {'local', 'host', 'hostssl', 'hostnossl'}: + raise ValueError('invalid HBA record type: {!r}'.format(type)) + + pg_hba = os.path.join(self._data_dir, 'pg_hba.conf') + + record = '{} {} {}'.format(type, database, user) + + if type != 'local': + if address is None: + raise ValueError( + '{!r} entry requires a valid address'.format(type)) + else: + record += ' {}'.format(address) + + record += ' {}'.format(auth_method) + + if auth_options is not None: + record += ' ' + ' '.join( + '{}={}'.format(k, v) for k, v in auth_options) + + try: + with open(pg_hba, 'a') as f: + print(record, file=f) + except IOError as e: + raise ClusterError( + 'cannot modify HBA records: {}'.format(e)) from e + + def trust_local_connections(self): + self.reset_hba() + + if _system != 'Windows': + self.add_hba_entry(type='local', database='all', + user='all', auth_method='trust') + self.add_hba_entry(type='host', address='127.0.0.1/32', + database='all', user='all', + auth_method='trust') + self.add_hba_entry(type='host', address='::1/128', + database='all', user='all', + auth_method='trust') + status = self.get_status() + if status == 'running': + self.reload() + + def trust_local_replication_by(self, user): + if _system != 'Windows': + self.add_hba_entry(type='local', database='replication', + user=user, auth_method='trust') + self.add_hba_entry(type='host', address='127.0.0.1/32', + database='replication', user=user, + auth_method='trust') + self.add_hba_entry(type='host', address='::1/128', + database='replication', user=user, + auth_method='trust') + status = self.get_status() + if status == 'running': + self.reload() + + def _init_env(self): + if not self._pg_bin_dir: + pg_config = self._find_pg_config(self._pg_config_path) + pg_config_data = self._run_pg_config(pg_config) + + self._pg_bin_dir = pg_config_data.get('bindir') + if not self._pg_bin_dir: + raise ClusterError( + 'pg_config output did not provide the BINDIR value') + + self._pg_ctl = self._find_pg_binary('pg_ctl') + self._postgres = self._find_pg_binary('postgres') + self._pg_version = self._get_pg_version() + + def _connection_addr_from_pidfile(self): + pidfile = os.path.join(self._data_dir, 'postmaster.pid') + + try: + with open(pidfile, 'rt') as f: + piddata = f.read() + except FileNotFoundError: + return None + + lines = piddata.splitlines() + + if len(lines) < 6: + # A complete postgres pidfile is at least 6 lines + return None + + pmpid = int(lines[0]) + if self._daemon_pid and pmpid != self._daemon_pid: + # This might be an old pidfile left from previous postgres + # daemon run. 
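For reference, `add_hba_entry()` above simply appends a whitespace-separated record to `pg_hba.conf`, and `trust_local_connections()` composes three such calls. Given some `Cluster` instance, for example:

    cluster.add_hba_entry(type='host', database='all', user='all',
                          address='127.0.0.1/32', auth_method='trust')
    # appends this line to <data_dir>/pg_hba.conf:
    #   host all all 127.0.0.1/32 trust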
+ return None + + portnum = lines[3] + sockdir = lines[4] + hostaddr = lines[5] + + if sockdir: + if sockdir[0] != '/': + # Relative sockdir + sockdir = os.path.normpath( + os.path.join(self._data_dir, sockdir)) + host_str = sockdir + else: + host_str = hostaddr + + if host_str == '*': + host_str = 'localhost' + elif host_str == '0.0.0.0': + host_str = '127.0.0.1' + elif host_str == '::': + host_str = '::1' + + return { + 'host': host_str, + 'port': portnum + } + + def _test_connection(self, timeout=60): + self._connection_addr = None + + loop = asyncio.new_event_loop() + + try: + for i in range(timeout): + if self._connection_addr is None: + conn_spec = self._get_connection_spec() + if conn_spec is None: + time.sleep(1) + continue + + try: + con = loop.run_until_complete( + asyncpg.connect(database='postgres', + user='postgres', + timeout=5, loop=loop, + **self._connection_addr)) + except (OSError, asyncio.TimeoutError, + asyncpg.CannotConnectNowError, + asyncpg.PostgresConnectionError): + time.sleep(1) + continue + except asyncpg.PostgresError: + # Any other error other than ServerNotReadyError or + # ConnectionError is interpreted to indicate the server is + # up. + break + else: + loop.run_until_complete(con.close()) + break + finally: + loop.close() + + return 'running' + + def _run_pg_config(self, pg_config_path): + process = subprocess.run( + pg_config_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = process.stdout, process.stderr + + if process.returncode != 0: + raise ClusterError('pg_config exited with status {:d}: {}'.format( + process.returncode, stderr)) + else: + config = {} + + for line in stdout.splitlines(): + k, eq, v = line.decode('utf-8').partition('=') + if eq: + config[k.strip().lower()] = v.strip() + + return config + + def _find_pg_config(self, pg_config_path): + if pg_config_path is None: + pg_install = ( + os.environ.get('PGINSTALLATION') + or os.environ.get('PGBIN') + ) + if pg_install: + pg_config_path = platform_exe( + os.path.join(pg_install, 'pg_config')) + else: + pathenv = os.environ.get('PATH').split(os.pathsep) + for path in pathenv: + pg_config_path = platform_exe( + os.path.join(path, 'pg_config')) + if os.path.exists(pg_config_path): + break + else: + pg_config_path = None + + if not pg_config_path: + raise ClusterError('could not find pg_config executable') + + if not os.path.isfile(pg_config_path): + raise ClusterError('{!r} is not an executable'.format( + pg_config_path)) + + return pg_config_path + + def _find_pg_binary(self, binary): + bpath = platform_exe(os.path.join(self._pg_bin_dir, binary)) + + if not os.path.isfile(bpath): + raise ClusterError( + 'could not find {} executable: '.format(binary) + + '{!r} does not exist or is not a file'.format(bpath)) + + return bpath + + def _get_pg_version(self): + process = subprocess.run( + [self._postgres, '--version'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = process.stdout, process.stderr + + if process.returncode != 0: + raise ClusterError( + 'postgres --version exited with status {:d}: {}'.format( + process.returncode, stderr)) + + version_string = stdout.decode('utf-8').strip(' \n') + prefix = 'postgres (PostgreSQL) ' + if not version_string.startswith(prefix): + raise ClusterError( + 'could not determine server version from {!r}'.format( + version_string)) + version_string = version_string[len(prefix):] + + return serverversion.split_server_version_string(version_string) + + +class TempCluster(Cluster): + def __init__(self, *, + 
data_dir_suffix=None, data_dir_prefix=None, + data_dir_parent=None, pg_config_path=None): + self._data_dir = tempfile.mkdtemp(suffix=data_dir_suffix, + prefix=data_dir_prefix, + dir=data_dir_parent) + super().__init__(self._data_dir, pg_config_path=pg_config_path) + + +class HotStandbyCluster(TempCluster): + def __init__(self, *, + master, replication_user, + data_dir_suffix=None, data_dir_prefix=None, + data_dir_parent=None, pg_config_path=None): + self._master = master + self._repl_user = replication_user + super().__init__( + data_dir_suffix=data_dir_suffix, + data_dir_prefix=data_dir_prefix, + data_dir_parent=data_dir_parent, + pg_config_path=pg_config_path) + + def _init_env(self): + super()._init_env() + self._pg_basebackup = self._find_pg_binary('pg_basebackup') + + def init(self, **settings): + """Initialize cluster.""" + if self.get_status() != 'not-initialized': + raise ClusterError( + 'cluster in {!r} has already been initialized'.format( + self._data_dir)) + + process = subprocess.run( + [self._pg_basebackup, '-h', self._master['host'], + '-p', self._master['port'], '-D', self._data_dir, + '-U', self._repl_user], + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + + output = process.stdout + + if process.returncode != 0: + raise ClusterError( + 'pg_basebackup init exited with status {:d}:\n{}'.format( + process.returncode, output.decode())) + + if self._pg_version <= (11, 0): + with open(os.path.join(self._data_dir, 'recovery.conf'), 'w') as f: + f.write(textwrap.dedent("""\ + standby_mode = 'on' + primary_conninfo = 'host={host} port={port} user={user}' + """.format( + host=self._master['host'], + port=self._master['port'], + user=self._repl_user))) + else: + f = open(os.path.join(self._data_dir, 'standby.signal'), 'w') + f.close() + + return output.decode() + + def start(self, wait=60, *, server_settings={}, **opts): + if self._pg_version >= (12, 0): + server_settings = server_settings.copy() + server_settings['primary_conninfo'] = ( + '"host={host} port={port} user={user}"'.format( + host=self._master['host'], + port=self._master['port'], + user=self._repl_user, + ) + ) + + super().start(wait=wait, server_settings=server_settings, **opts) + + +class RunningCluster(Cluster): + def __init__(self, **kwargs): + self.conn_spec = kwargs + + def is_managed(self): + return False + + def get_connection_spec(self): + return dict(self.conn_spec) + + def get_status(self): + return 'running' + + def init(self, **settings): + pass + + def start(self, wait=60, **settings): + pass + + def stop(self, wait=60): + pass + + def destroy(self): + pass + + def reset_hba(self): + raise ClusterError('cannot modify HBA records of unmanaged cluster') + + def add_hba_entry(self, *, type='host', database, user, address=None, + auth_method, auth_options=None): + raise ClusterError('cannot modify HBA records of unmanaged cluster') diff --git a/sbsheriff/Lib/site-packages/asyncpg/compat.py b/sbsheriff/Lib/site-packages/asyncpg/compat.py new file mode 100644 index 0000000..29b8e16 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/compat.py @@ -0,0 +1,61 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncio +import pathlib +import platform + + +SYSTEM = platform.uname().system + + +if SYSTEM == 'Windows': + import ctypes.wintypes + + CSIDL_APPDATA = 0x001a + + def get_pg_home_directory() -> pathlib.Path: + # We cannot simply use 
expanduser() as that returns the user's + # home directory, whereas Postgres stores its config in + # %AppData% on Windows. + buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH) + r = ctypes.windll.shell32.SHGetFolderPathW(0, CSIDL_APPDATA, 0, 0, buf) + if r: + return None + else: + return pathlib.Path(buf.value) / 'postgresql' + +else: + def get_pg_home_directory() -> pathlib.Path: + return pathlib.Path.home() + + +async def wait_closed(stream): + # Not all asyncio versions have StreamWriter.wait_closed(). + if hasattr(stream, 'wait_closed'): + try: + await stream.wait_closed() + except ConnectionResetError: + # On Windows wait_closed() sometimes propagates + # ConnectionResetError which is totally unnecessary. + pass + + +# Workaround for https://bugs.python.org/issue37658 +async def wait_for(fut, timeout): + if timeout is None: + return await fut + + fut = asyncio.ensure_future(fut) + + try: + return await asyncio.wait_for(fut, timeout) + except asyncio.CancelledError: + if fut.done(): + return fut.result() + else: + raise diff --git a/sbsheriff/Lib/site-packages/asyncpg/connect_utils.py b/sbsheriff/Lib/site-packages/asyncpg/connect_utils.py new file mode 100644 index 0000000..40905ed --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/connect_utils.py @@ -0,0 +1,956 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncio +import collections +import enum +import functools +import getpass +import os +import pathlib +import platform +import re +import socket +import ssl as ssl_module +import stat +import struct +import sys +import time +import typing +import urllib.parse +import warnings +import inspect + +from . import compat +from . import exceptions +from . import protocol + + +class SSLMode(enum.IntEnum): + disable = 0 + allow = 1 + prefer = 2 + require = 3 + verify_ca = 4 + verify_full = 5 + + @classmethod + def parse(cls, sslmode): + if isinstance(sslmode, cls): + return sslmode + return getattr(cls, sslmode.replace('-', '_')) + + +_ConnectionParameters = collections.namedtuple( + 'ConnectionParameters', + [ + 'user', + 'password', + 'database', + 'ssl', + 'sslmode', + 'direct_tls', + 'connect_timeout', + 'server_settings', + ]) + + +_ClientConfiguration = collections.namedtuple( + 'ConnectionConfiguration', + [ + 'command_timeout', + 'statement_cache_size', + 'max_cached_statement_lifetime', + 'max_cacheable_statement_size', + ]) + + +_system = platform.uname().system + + +if _system == 'Windows': + PGPASSFILE = 'pgpass.conf' +else: + PGPASSFILE = '.pgpass' + + +def _read_password_file(passfile: pathlib.Path) \ + -> typing.List[typing.Tuple[str, ...]]: + + passtab = [] + + try: + if not passfile.exists(): + return [] + + if not passfile.is_file(): + warnings.warn( + 'password file {!r} is not a plain file'.format(passfile)) + + return [] + + if _system != 'Windows': + if passfile.stat().st_mode & (stat.S_IRWXG | stat.S_IRWXO): + warnings.warn( + 'password file {!r} has group or world access; ' + 'permissions should be u=rw (0600) or less'.format( + passfile)) + + return [] + + with passfile.open('rt') as f: + for line in f: + line = line.strip() + if not line or line.startswith('#'): + # Skip empty lines and comments. + continue + # Backslash escapes both itself and the colon, + # which is a record separator. 
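The escaping dance that follows (rewrite backslash pairs to a placeholder, split on the remaining unescaped colons, then restore) parses standard libpq password files. Each record has the shape `hostname:port:database:username:password`; `*` is a wildcard field, and a backslash escapes a literal `:` or `\` inside a field. Note that the parser above skips only whole-line `#` comments, so in this illustrative file the second password is literally `pa:ss`:

    localhost:5432:mydb:alice:s3cret
    *:*:*:bob:pa\:ss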
+ line = line.replace(R'\\', '\n') + passtab.append(tuple( + p.replace('\n', R'\\') + for p in re.split(r'(? pathlib.Path: + return (pathlib.Path.home() / '.postgresql' / filename).resolve() + + +def _parse_connect_dsn_and_args(*, dsn, host, port, user, + password, passfile, database, ssl, + direct_tls, connect_timeout, server_settings): + # `auth_hosts` is the version of host information for the purposes + # of reading the pgpass file. + auth_hosts = None + sslcert = sslkey = sslrootcert = sslcrl = sslpassword = None + ssl_min_protocol_version = ssl_max_protocol_version = None + + if dsn: + parsed = urllib.parse.urlparse(dsn) + + if parsed.scheme not in {'postgresql', 'postgres'}: + raise ValueError( + 'invalid DSN: scheme is expected to be either ' + '"postgresql" or "postgres", got {!r}'.format(parsed.scheme)) + + if parsed.netloc: + if '@' in parsed.netloc: + dsn_auth, _, dsn_hostspec = parsed.netloc.partition('@') + else: + dsn_hostspec = parsed.netloc + dsn_auth = '' + else: + dsn_auth = dsn_hostspec = '' + + if dsn_auth: + dsn_user, _, dsn_password = dsn_auth.partition(':') + else: + dsn_user = dsn_password = '' + + if not host and dsn_hostspec: + host, port = _parse_hostlist(dsn_hostspec, port, unquote=True) + + if parsed.path and database is None: + dsn_database = parsed.path + if dsn_database.startswith('/'): + dsn_database = dsn_database[1:] + database = urllib.parse.unquote(dsn_database) + + if user is None and dsn_user: + user = urllib.parse.unquote(dsn_user) + + if password is None and dsn_password: + password = urllib.parse.unquote(dsn_password) + + if parsed.query: + query = urllib.parse.parse_qs(parsed.query, strict_parsing=True) + for key, val in query.items(): + if isinstance(val, list): + query[key] = val[-1] + + if 'port' in query: + val = query.pop('port') + if not port and val: + port = [int(p) for p in val.split(',')] + + if 'host' in query: + val = query.pop('host') + if not host and val: + host, port = _parse_hostlist(val, port) + + if 'dbname' in query: + val = query.pop('dbname') + if database is None: + database = val + + if 'database' in query: + val = query.pop('database') + if database is None: + database = val + + if 'user' in query: + val = query.pop('user') + if user is None: + user = val + + if 'password' in query: + val = query.pop('password') + if password is None: + password = val + + if 'passfile' in query: + val = query.pop('passfile') + if passfile is None: + passfile = val + + if 'sslmode' in query: + val = query.pop('sslmode') + if ssl is None: + ssl = val + + if 'sslcert' in query: + sslcert = query.pop('sslcert') + + if 'sslkey' in query: + sslkey = query.pop('sslkey') + + if 'sslrootcert' in query: + sslrootcert = query.pop('sslrootcert') + + if 'sslcrl' in query: + sslcrl = query.pop('sslcrl') + + if 'sslpassword' in query: + sslpassword = query.pop('sslpassword') + + if 'ssl_min_protocol_version' in query: + ssl_min_protocol_version = query.pop( + 'ssl_min_protocol_version' + ) + + if 'ssl_max_protocol_version' in query: + ssl_max_protocol_version = query.pop( + 'ssl_max_protocol_version' + ) + + if query: + if server_settings is None: + server_settings = query + else: + server_settings = {**query, **server_settings} + + if not host: + hostspec = os.environ.get('PGHOST') + if hostspec: + host, port = _parse_hostlist(hostspec, port) + + if not host: + auth_hosts = ['localhost'] + + if _system == 'Windows': + host = ['localhost'] + else: + host = ['/run/postgresql', '/var/run/postgresql', + '/tmp', '/private/tmp', 'localhost'] + + if not 
isinstance(host, list): + host = [host] + + if auth_hosts is None: + auth_hosts = host + + if not port: + portspec = os.environ.get('PGPORT') + if portspec: + if ',' in portspec: + port = [int(p) for p in portspec.split(',')] + else: + port = int(portspec) + else: + port = 5432 + + elif isinstance(port, (list, tuple)): + port = [int(p) for p in port] + + else: + port = int(port) + + port = _validate_port_spec(host, port) + + if user is None: + user = os.getenv('PGUSER') + if not user: + user = getpass.getuser() + + if password is None: + password = os.getenv('PGPASSWORD') + + if database is None: + database = os.getenv('PGDATABASE') + + if database is None: + database = user + + if user is None: + raise exceptions.InterfaceError( + 'could not determine user name to connect with') + + if database is None: + raise exceptions.InterfaceError( + 'could not determine database name to connect to') + + if password is None: + if passfile is None: + passfile = os.getenv('PGPASSFILE') + + if passfile is None: + homedir = compat.get_pg_home_directory() + if homedir: + passfile = homedir / PGPASSFILE + else: + passfile = None + else: + passfile = pathlib.Path(passfile) + + if passfile is not None: + password = _read_password_from_pgpass( + hosts=auth_hosts, ports=port, + database=database, user=user, + passfile=passfile) + + addrs = [] + have_tcp_addrs = False + for h, p in zip(host, port): + if h.startswith('/'): + # UNIX socket name + if '.s.PGSQL.' not in h: + h = os.path.join(h, '.s.PGSQL.{}'.format(p)) + addrs.append(h) + else: + # TCP host/port + addrs.append((h, p)) + have_tcp_addrs = True + + if not addrs: + raise ValueError( + 'could not determine the database address to connect to') + + if ssl is None: + ssl = os.getenv('PGSSLMODE') + + if ssl is None and have_tcp_addrs: + ssl = 'prefer' + + if isinstance(ssl, (str, SSLMode)): + try: + sslmode = SSLMode.parse(ssl) + except AttributeError: + modes = ', '.join(m.name.replace('_', '-') for m in SSLMode) + raise exceptions.InterfaceError( + '`sslmode` parameter must be one of: {}'.format(modes)) + + # docs at https://www.postgresql.org/docs/10/static/libpq-connect.html + if sslmode < SSLMode.allow: + ssl = False + else: + ssl = ssl_module.SSLContext(ssl_module.PROTOCOL_TLS_CLIENT) + ssl.check_hostname = sslmode >= SSLMode.verify_full + if sslmode < SSLMode.require: + ssl.verify_mode = ssl_module.CERT_NONE + else: + if sslrootcert is None: + sslrootcert = os.getenv('PGSSLROOTCERT') + if sslrootcert: + ssl.load_verify_locations(cafile=sslrootcert) + ssl.verify_mode = ssl_module.CERT_REQUIRED + else: + sslrootcert = _dot_postgresql_path('root.crt') + try: + ssl.load_verify_locations(cafile=sslrootcert) + except FileNotFoundError: + if sslmode > SSLMode.require: + raise ValueError( + f'root certificate file "{sslrootcert}" does ' + f'not exist\nEither provide the file or ' + f'change sslmode to disable server ' + f'certificate verification.' 
+ ) + elif sslmode == SSLMode.require: + ssl.verify_mode = ssl_module.CERT_NONE + else: + assert False, 'unreachable' + else: + ssl.verify_mode = ssl_module.CERT_REQUIRED + + if sslcrl is None: + sslcrl = os.getenv('PGSSLCRL') + if sslcrl: + ssl.load_verify_locations(cafile=sslcrl) + ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN + else: + sslcrl = _dot_postgresql_path('root.crl') + try: + ssl.load_verify_locations(cafile=sslcrl) + except FileNotFoundError: + pass + else: + ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN + + if sslkey is None: + sslkey = os.getenv('PGSSLKEY') + if not sslkey: + sslkey = _dot_postgresql_path('postgresql.key') + if not sslkey.exists(): + sslkey = None + if not sslpassword: + sslpassword = '' + if sslcert is None: + sslcert = os.getenv('PGSSLCERT') + if sslcert: + ssl.load_cert_chain( + sslcert, keyfile=sslkey, password=lambda: sslpassword + ) + else: + sslcert = _dot_postgresql_path('postgresql.crt') + try: + ssl.load_cert_chain( + sslcert, keyfile=sslkey, password=lambda: sslpassword + ) + except FileNotFoundError: + pass + + # OpenSSL 1.1.1 keylog file, copied from create_default_context() + if hasattr(ssl, 'keylog_filename'): + keylogfile = os.environ.get('SSLKEYLOGFILE') + if keylogfile and not sys.flags.ignore_environment: + ssl.keylog_filename = keylogfile + + if ssl_min_protocol_version is None: + ssl_min_protocol_version = os.getenv('PGSSLMINPROTOCOLVERSION') + if ssl_min_protocol_version: + ssl.minimum_version = _parse_tls_version( + ssl_min_protocol_version + ) + else: + ssl.minimum_version = _parse_tls_version('TLSv1.2') + + if ssl_max_protocol_version is None: + ssl_max_protocol_version = os.getenv('PGSSLMAXPROTOCOLVERSION') + if ssl_max_protocol_version: + ssl.maximum_version = _parse_tls_version( + ssl_max_protocol_version + ) + + elif ssl is True: + ssl = ssl_module.create_default_context() + sslmode = SSLMode.verify_full + else: + sslmode = SSLMode.disable + + if server_settings is not None and ( + not isinstance(server_settings, dict) or + not all(isinstance(k, str) for k in server_settings) or + not all(isinstance(v, str) for v in server_settings.values())): + raise ValueError( + 'server_settings is expected to be None or ' + 'a Dict[str, str]') + + params = _ConnectionParameters( + user=user, password=password, database=database, ssl=ssl, + sslmode=sslmode, direct_tls=direct_tls, + connect_timeout=connect_timeout, server_settings=server_settings) + + return addrs, params + + +def _parse_connect_arguments(*, dsn, host, port, user, password, passfile, + database, timeout, command_timeout, + statement_cache_size, + max_cached_statement_lifetime, + max_cacheable_statement_size, + ssl, direct_tls, server_settings): + + local_vars = locals() + for var_name in {'max_cacheable_statement_size', + 'max_cached_statement_lifetime', + 'statement_cache_size'}: + var_val = local_vars[var_name] + if var_val is None or isinstance(var_val, bool) or var_val < 0: + raise ValueError( + '{} is expected to be greater ' + 'or equal to 0, got {!r}'.format(var_name, var_val)) + + if command_timeout is not None: + try: + if isinstance(command_timeout, bool): + raise ValueError + command_timeout = float(command_timeout) + if command_timeout <= 0: + raise ValueError + except ValueError: + raise ValueError( + 'invalid command_timeout value: ' + 'expected greater than 0 float (got {!r})'.format( + command_timeout)) from None + + addrs, params = _parse_connect_dsn_and_args( + dsn=dsn, host=host, port=port, user=user, + password=password, passfile=passfile, 
ssl=ssl, + direct_tls=direct_tls, database=database, + connect_timeout=timeout, server_settings=server_settings) + + config = _ClientConfiguration( + command_timeout=command_timeout, + statement_cache_size=statement_cache_size, + max_cached_statement_lifetime=max_cached_statement_lifetime, + max_cacheable_statement_size=max_cacheable_statement_size,) + + return addrs, params, config + + +class TLSUpgradeProto(asyncio.Protocol): + def __init__(self, loop, host, port, ssl_context, ssl_is_advisory): + self.on_data = _create_future(loop) + self.host = host + self.port = port + self.ssl_context = ssl_context + self.ssl_is_advisory = ssl_is_advisory + + def data_received(self, data): + if data == b'S': + self.on_data.set_result(True) + elif (self.ssl_is_advisory and + self.ssl_context.verify_mode == ssl_module.CERT_NONE and + data == b'N'): + # ssl_is_advisory will imply that ssl.verify_mode == CERT_NONE, + # since the only way to get ssl_is_advisory is from + # sslmode=prefer. But be extra sure to disallow insecure + # connections when the ssl context asks for real security. + self.on_data.set_result(False) + else: + self.on_data.set_exception( + ConnectionError( + 'PostgreSQL server at "{host}:{port}" ' + 'rejected SSL upgrade'.format( + host=self.host, port=self.port))) + + def connection_lost(self, exc): + if not self.on_data.done(): + if exc is None: + exc = ConnectionError('unexpected connection_lost() call') + self.on_data.set_exception(exc) + + +async def _create_ssl_connection(protocol_factory, host, port, *, + loop, ssl_context, ssl_is_advisory=False): + + tr, pr = await loop.create_connection( + lambda: TLSUpgradeProto(loop, host, port, + ssl_context, ssl_is_advisory), + host, port) + + tr.write(struct.pack('!ll', 8, 80877103)) # SSLRequest message. 
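The magic constant written above is the SSLRequest code from the PostgreSQL frontend/backend protocol, 80877103 = (1234 << 16) | 5679, sent as an 8-byte packet; the server then answers with a single byte, b'S' to proceed with the TLS handshake or b'N' to refuse. A quick byte-level check, purely illustrative:

    import struct

    msg = struct.pack('!ll', 8, 80877103)    # length field + request code
    assert msg == b'\x00\x00\x00\x08\x04\xd2\x16/'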
+ + try: + do_ssl_upgrade = await pr.on_data + except (Exception, asyncio.CancelledError): + tr.close() + raise + + if hasattr(loop, 'start_tls'): + if do_ssl_upgrade: + try: + new_tr = await loop.start_tls( + tr, pr, ssl_context, server_hostname=host) + except (Exception, asyncio.CancelledError): + tr.close() + raise + else: + new_tr = tr + + pg_proto = protocol_factory() + pg_proto.is_ssl = do_ssl_upgrade + pg_proto.connection_made(new_tr) + new_tr.set_protocol(pg_proto) + + return new_tr, pg_proto + else: + conn_factory = functools.partial( + loop.create_connection, protocol_factory) + + if do_ssl_upgrade: + conn_factory = functools.partial( + conn_factory, ssl=ssl_context, server_hostname=host) + + sock = _get_socket(tr) + sock = sock.dup() + _set_nodelay(sock) + tr.close() + + try: + new_tr, pg_proto = await conn_factory(sock=sock) + pg_proto.is_ssl = do_ssl_upgrade + return new_tr, pg_proto + except (Exception, asyncio.CancelledError): + sock.close() + raise + + +async def _connect_addr( + *, + addr, + loop, + timeout, + params, + config, + connection_class, + record_class +): + assert loop is not None + + if timeout <= 0: + raise asyncio.TimeoutError + + params_input = params + if callable(params.password): + password = params.password() + if inspect.isawaitable(password): + password = await password + + params = params._replace(password=password) + args = (addr, loop, config, connection_class, record_class, params_input) + + # prepare the params (which attempt has ssl) for the 2 attempts + if params.sslmode == SSLMode.allow: + params_retry = params + params = params._replace(ssl=None) + elif params.sslmode == SSLMode.prefer: + params_retry = params._replace(ssl=None) + else: + # skip retry if we don't have to + return await __connect_addr(params, timeout, False, *args) + + # first attempt + before = time.monotonic() + try: + return await __connect_addr(params, timeout, True, *args) + except _RetryConnectSignal: + pass + + # second attempt + timeout -= time.monotonic() - before + if timeout <= 0: + raise asyncio.TimeoutError + else: + return await __connect_addr(params_retry, timeout, False, *args) + + +class _RetryConnectSignal(Exception): + pass + + +async def __connect_addr( + params, + timeout, + retry, + addr, + loop, + config, + connection_class, + record_class, + params_input, +): + connected = _create_future(loop) + + proto_factory = lambda: protocol.Protocol( + addr, connected, params, record_class, loop) + + if isinstance(addr, str): + # UNIX socket + connector = loop.create_unix_connection(proto_factory, addr) + + elif params.ssl and params.direct_tls: + # if ssl and direct_tls are given, skip STARTTLS and perform direct + # SSL connection + connector = loop.create_connection( + proto_factory, *addr, ssl=params.ssl + ) + + elif params.ssl: + connector = _create_ssl_connection( + proto_factory, *addr, loop=loop, ssl_context=params.ssl, + ssl_is_advisory=params.sslmode == SSLMode.prefer) + else: + connector = loop.create_connection(proto_factory, *addr) + + connector = asyncio.ensure_future(connector) + before = time.monotonic() + tr, pr = await compat.wait_for(connector, timeout=timeout) + timeout -= time.monotonic() - before + + try: + if timeout <= 0: + raise asyncio.TimeoutError + await compat.wait_for(connected, timeout=timeout) + except ( + exceptions.InvalidAuthorizationSpecificationError, + exceptions.ConnectionDoesNotExistError, # seen on Windows + ): + tr.close() + + # retry=True here is a redundant check because we don't want to + # accidentally raise the 
internal _RetryConnectSignal to the user
+        if retry and (
+            params.sslmode == SSLMode.allow and not pr.is_ssl or
+            params.sslmode == SSLMode.prefer and pr.is_ssl
+        ):
+            # Trigger retry when:
+            # 1. First attempt with sslmode=allow, ssl=None failed
+            # 2. First attempt with sslmode=prefer, ssl=ctx failed while the
+            #    server claimed to support SSL (returning "S" for SSLRequest)
+            #    (likely because pg_hba.conf rejected the connection)
+            raise _RetryConnectSignal()
+
+        else:
+            # but will NOT retry if:
+            # 1. First attempt with sslmode=prefer failed but the server
+            #    doesn't support SSL (returning 'N' for SSLRequest), because
+            #    we already tried to connect without SSL through
+            #    ssl_is_advisory
+            # 2. Second attempt with sslmode=prefer, ssl=None failed
+            # 3. Second attempt with sslmode=allow, ssl=ctx failed
+            # 4. Any other sslmode
+            raise
+
+    except (Exception, asyncio.CancelledError):
+        tr.close()
+        raise
+
+    con = connection_class(pr, tr, loop, addr, config, params_input)
+    pr.set_connection(con)
+    return con
+
+
+async def _connect(*, loop, timeout, connection_class, record_class, **kwargs):
+    if loop is None:
+        loop = asyncio.get_event_loop()
+
+    addrs, params, config = _parse_connect_arguments(timeout=timeout, **kwargs)
+
+    last_error = None
+    addr = None
+    for addr in addrs:
+        before = time.monotonic()
+        try:
+            return await _connect_addr(
+                addr=addr,
+                loop=loop,
+                timeout=timeout,
+                params=params,
+                config=config,
+                connection_class=connection_class,
+                record_class=record_class,
+            )
+        except (OSError, asyncio.TimeoutError, ConnectionError) as ex:
+            last_error = ex
+        finally:
+            timeout -= time.monotonic() - before
+
+    raise last_error
+
+
+async def _cancel(*, loop, addr, params: _ConnectionParameters,
+                  backend_pid, backend_secret):
+
+    class CancelProto(asyncio.Protocol):
+
+        def __init__(self):
+            self.on_disconnect = _create_future(loop)
+            self.is_ssl = False
+
+        def connection_lost(self, exc):
+            if not self.on_disconnect.done():
+                self.on_disconnect.set_result(True)
+
+    if isinstance(addr, str):
+        tr, pr = await loop.create_unix_connection(CancelProto, addr)
+    else:
+        if params.ssl and params.sslmode != SSLMode.allow:
+            tr, pr = await _create_ssl_connection(
+                CancelProto,
+                *addr,
+                loop=loop,
+                ssl_context=params.ssl,
+                ssl_is_advisory=params.sslmode == SSLMode.prefer)
+        else:
+            tr, pr = await loop.create_connection(
+                CancelProto, *addr)
+            _set_nodelay(_get_socket(tr))
+
+    # Pack a CancelRequest message
+    msg = struct.pack('!llll', 16, 80877102, backend_pid, backend_secret)
+
+    try:
+        tr.write(msg)
+        await pr.on_disconnect
+    finally:
+        tr.close()
+
+
+def _get_socket(transport):
+    sock = transport.get_extra_info('socket')
+    if sock is None:
+        # Shouldn't happen with any asyncio-compliant event loop.
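# A condensed sketch of the two-attempt scheme implemented by _connect_addr
# and __connect_addr above: sslmode=allow tries plain first and retries with
# SSL, sslmode=prefer tries SSL first and retries without it, and every
# other mode makes a single attempt. ssl_ctx stands in for the prepared SSL
# context; this helper is illustrative only, not part of asyncpg.
def attempt_plan(sslmode, ssl_ctx):
    if sslmode == 'allow':
        return [None, ssl_ctx]   # plain first, SSL on the retry
    if sslmode == 'prefer':
        return [ssl_ctx, None]   # SSL first, plain on the retry
    return [ssl_ctx]             # disable/require/verify-*: one attempt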
+ raise ConnectionError( + 'could not get the socket for transport {!r}'.format(transport)) + return sock + + +def _set_nodelay(sock): + if not hasattr(socket, 'AF_UNIX') or sock.family != socket.AF_UNIX: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + +def _create_future(loop): + try: + create_future = loop.create_future + except AttributeError: + return asyncio.Future(loop=loop) + else: + return create_future() diff --git a/sbsheriff/Lib/site-packages/asyncpg/connection.py b/sbsheriff/Lib/site-packages/asyncpg/connection.py new file mode 100644 index 0000000..365ab41 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/connection.py @@ -0,0 +1,2401 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncio +import asyncpg +import collections +import collections.abc +import functools +import itertools +import inspect +import os +import sys +import time +import traceback +import typing +import warnings +import weakref + +from . import connect_utils +from . import cursor +from . import exceptions +from . import introspection +from . import prepared_stmt +from . import protocol +from . import serverversion +from . import transaction +from . import utils + + +class ConnectionMeta(type): + + def __instancecheck__(cls, instance): + mro = type(instance).__mro__ + return Connection in mro or _ConnectionProxy in mro + + +class Connection(metaclass=ConnectionMeta): + """A representation of a database session. + + Connections are created by calling :func:`~asyncpg.connection.connect`. + """ + + __slots__ = ('_protocol', '_transport', '_loop', + '_top_xact', '_aborted', + '_pool_release_ctr', '_stmt_cache', '_stmts_to_close', + '_listeners', '_server_version', '_server_caps', + '_intro_query', '_reset_query', '_proxy', + '_stmt_exclusive_section', '_config', '_params', '_addr', + '_log_listeners', '_termination_listeners', '_cancellations', + '_source_traceback', '__weakref__') + + def __init__(self, protocol, transport, loop, + addr, + config: connect_utils._ClientConfiguration, + params: connect_utils._ConnectionParameters): + self._protocol = protocol + self._transport = transport + self._loop = loop + self._top_xact = None + self._aborted = False + # Incremented every time the connection is released back to a pool. + # Used to catch invalid references to connection-related resources + # post-release (e.g. explicit prepared statements). 
+        self._pool_release_ctr = 0
+
+        self._addr = addr
+        self._config = config
+        self._params = params
+
+        self._stmt_cache = _StatementCache(
+            loop=loop,
+            max_size=config.statement_cache_size,
+            on_remove=functools.partial(
+                _weak_maybe_gc_stmt, weakref.ref(self)),
+            max_lifetime=config.max_cached_statement_lifetime)
+
+        self._stmts_to_close = set()
+
+        self._listeners = {}
+        self._log_listeners = set()
+        self._cancellations = set()
+        self._termination_listeners = set()
+
+        settings = self._protocol.get_settings()
+        ver_string = settings.server_version
+        self._server_version = \
+            serverversion.split_server_version_string(ver_string)
+
+        self._server_caps = _detect_server_capabilities(
+            self._server_version, settings)
+
+        if self._server_version < (14, 0):
+            self._intro_query = introspection.INTRO_LOOKUP_TYPES_13
+        else:
+            self._intro_query = introspection.INTRO_LOOKUP_TYPES
+
+        self._reset_query = None
+        self._proxy = None
+
+        # Used to serialize operations that might involve anonymous
+        # statements. Specifically, we want to make the following
+        # operation atomic:
+        #     ("prepare an anonymous statement", "use the statement")
+        #
+        # Used for `con.fetchval()`, `con.fetch()`, `con.fetchrow()`,
+        # `con.execute()`, and `con.executemany()`.
+        self._stmt_exclusive_section = _Atomic()
+
+        if loop.get_debug():
+            self._source_traceback = _extract_stack()
+        else:
+            self._source_traceback = None
+
+    def __del__(self):
+        if not self.is_closed() and self._protocol is not None:
+            if self._source_traceback:
+                msg = "unclosed connection {!r}; created at:\n {}".format(
+                    self, self._source_traceback)
+            else:
+                msg = (
+                    "unclosed connection {!r}; run in asyncio debug "
+                    "mode to show the traceback of connection "
+                    "origin".format(self)
+                )
+
+            warnings.warn(msg, ResourceWarning)
+            if not self._loop.is_closed():
+                self.terminate()
+
+    async def add_listener(self, channel, callback):
+        """Add a listener for Postgres notifications.
+
+        :param str channel: Channel to listen on.
+
+        :param callable callback:
+            A callable or a coroutine function receiving the following
+            arguments:
+            **connection**: a Connection the callback is registered with;
+            **pid**: PID of the Postgres server that sent the notification;
+            **channel**: name of the channel the notification was sent to;
+            **payload**: the payload.
+
+        .. versionchanged:: 0.24.0
+            The ``callback`` argument may be a coroutine function.
+        """
+        self._check_open()
+        if channel not in self._listeners:
+            await self.fetch('LISTEN {}'.format(utils._quote_ident(channel)))
+            self._listeners[channel] = set()
+        self._listeners[channel].add(_Callback.from_callable(callback))
+
+    async def remove_listener(self, channel, callback):
+        """Remove a listening callback on the specified channel."""
+        if self.is_closed():
+            return
+        if channel not in self._listeners:
+            return
+        cb = _Callback.from_callable(callback)
+        if cb not in self._listeners[channel]:
+            return
+        self._listeners[channel].remove(cb)
+        if not self._listeners[channel]:
+            del self._listeners[channel]
+            await self.fetch('UNLISTEN {}'.format(utils._quote_ident(channel)))
+
+    def add_log_listener(self, callback):
+        """Add a listener for Postgres log messages.
+
+        It will be called when an asynchronous NoticeResponse is received
+        from the connection. Possible message types are: WARNING, NOTICE,
+        DEBUG, INFO, or LOG.
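# A hedged usage sketch for the add_listener()/remove_listener() API above;
# the credentials and the channel name are assumptions for illustration.
import asyncio
import asyncpg

async def main():
    con = await asyncpg.connect(user='postgres')

    def on_event(connection, pid, channel, payload):
        print(f'{channel} (pid {pid}): {payload}')

    await con.add_listener('events', on_event)
    await con.execute("NOTIFY events, 'hello'")
    await asyncio.sleep(0.1)  # give the notification a moment to arrive
    await con.remove_listener('events', on_event)
    await con.close()

asyncio.run(main())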
+ + :param callable callback: + A callable or a coroutine function receiving the following + arguments: + **connection**: a Connection the callback is registered with; + **message**: the `exceptions.PostgresLogMessage` message. + + .. versionadded:: 0.12.0 + + .. versionchanged:: 0.24.0 + The ``callback`` argument may be a coroutine function. + """ + if self.is_closed(): + raise exceptions.InterfaceError('connection is closed') + self._log_listeners.add(_Callback.from_callable(callback)) + + def remove_log_listener(self, callback): + """Remove a listening callback for log messages. + + .. versionadded:: 0.12.0 + """ + self._log_listeners.discard(_Callback.from_callable(callback)) + + def add_termination_listener(self, callback): + """Add a listener that will be called when the connection is closed. + + :param callable callback: + A callable or a coroutine function receiving one argument: + **connection**: a Connection the callback is registered with. + + .. versionadded:: 0.21.0 + + .. versionchanged:: 0.24.0 + The ``callback`` argument may be a coroutine function. + """ + self._termination_listeners.add(_Callback.from_callable(callback)) + + def remove_termination_listener(self, callback): + """Remove a listening callback for connection termination. + + :param callable callback: + The callable or coroutine function that was passed to + :meth:`Connection.add_termination_listener`. + + .. versionadded:: 0.21.0 + """ + self._termination_listeners.discard(_Callback.from_callable(callback)) + + def get_server_pid(self): + """Return the PID of the Postgres server the connection is bound to.""" + return self._protocol.get_server_pid() + + def get_server_version(self): + """Return the version of the connected PostgreSQL server. + + The returned value is a named tuple similar to that in + ``sys.version_info``: + + .. code-block:: pycon + + >>> con.get_server_version() + ServerVersion(major=9, minor=6, micro=1, + releaselevel='final', serial=0) + + .. versionadded:: 0.8.0 + """ + return self._server_version + + def get_settings(self): + """Return connection settings. + + :return: :class:`~asyncpg.ConnectionSettings`. + """ + return self._protocol.get_settings() + + def transaction(self, *, isolation=None, readonly=False, + deferrable=False): + """Create a :class:`~transaction.Transaction` object. + + Refer to `PostgreSQL documentation`_ on the meaning of transaction + parameters. + + :param isolation: Transaction isolation mode, can be one of: + `'serializable'`, `'repeatable_read'`, + `'read_committed'`. If not specified, the behavior + is up to the server and session, which is usually + ``read_committed``. + + :param readonly: Specifies whether or not this transaction is + read-only. + + :param deferrable: Specifies whether or not this transaction is + deferrable. + + .. _`PostgreSQL documentation`: + https://www.postgresql.org/docs/ + current/static/sql-set-transaction.html + """ + self._check_open() + return transaction.Transaction(self, isolation, readonly, deferrable) + + def is_in_transaction(self): + """Return True if Connection is currently inside a transaction. + + :return bool: True if inside transaction, False otherwise. + + .. versionadded:: 0.16.0 + """ + return self._protocol.is_in_transaction() + + async def execute(self, query: str, *args, timeout: float=None) -> str: + """Execute an SQL command (or commands). + + This method can execute many SQL commands at once, when no arguments + are provided. + + Example: + + .. code-block:: pycon + + >>> await con.execute(''' + ... 
CREATE TABLE mytab (a int);
+        ...     INSERT INTO mytab (a) VALUES (100), (200), (300);
+        ... ''')
+        INSERT 0 3
+
+        >>> await con.execute('''
+        ...     INSERT INTO mytab (a) VALUES ($1), ($2)
+        ... ''', 10, 20)
+        INSERT 0 2
+
+        :param args: Query arguments.
+        :param float timeout: Optional timeout value in seconds.
+        :return str: Status of the last SQL command.
+
+        .. versionchanged:: 0.5.4
+           Made it possible to pass query arguments.
+        """
+        self._check_open()
+
+        if not args:
+            return await self._protocol.query(query, timeout)
+
+        _, status, _ = await self._execute(
+            query,
+            args,
+            0,
+            timeout,
+            return_status=True,
+        )
+        return status.decode()
+
+    async def executemany(self, command: str, args, *, timeout: float=None):
+        """Execute an SQL *command* for each sequence of arguments in *args*.
+
+        Example:
+
+        .. code-block:: pycon
+
+            >>> await con.executemany('''
+            ...     INSERT INTO mytab (a) VALUES ($1), ($2), ($3);
+            ... ''', [(1, 2, 3), (4, 5, 6)])
+
+        :param command: Command to execute.
+        :param args: An iterable containing sequences of arguments.
+        :param float timeout: Optional timeout value in seconds.
+        :return None: This method discards the results of the operations.
+
+        .. versionadded:: 0.7.0
+
+        .. versionchanged:: 0.11.0
+           `timeout` became a keyword-only parameter.
+
+        .. versionchanged:: 0.22.0
+           ``executemany()`` is now an atomic operation, which means that
+           either all executions succeed, or none at all. This is in contrast
+           to prior versions, where the effect of already-processed iterations
+           would remain in place when an error has occurred, unless
+           ``executemany()`` was called in a transaction.
+        """
+        self._check_open()
+        return await self._executemany(command, args, timeout)
+
+    async def _get_statement(
+        self,
+        query,
+        timeout,
+        *,
+        named=False,
+        use_cache=True,
+        ignore_custom_codec=False,
+        record_class=None
+    ):
+        if record_class is None:
+            record_class = self._protocol.get_record_class()
+        else:
+            _check_record_class(record_class)
+
+        if use_cache:
+            statement = self._stmt_cache.get(
+                (query, record_class, ignore_custom_codec)
+            )
+            if statement is not None:
+                return statement
+
+        # Only use the cache when:
+        #  * `statement_cache_size` is greater than 0;
+        #  * query size is less than `max_cacheable_statement_size`.
+        use_cache = self._stmt_cache.get_max_size() > 0
+        if (use_cache and
+                self._config.max_cacheable_statement_size and
+                len(query) > self._config.max_cacheable_statement_size):
+            use_cache = False
+
+        if isinstance(named, str):
+            stmt_name = named
+        elif use_cache or named:
+            stmt_name = self._get_unique_id('stmt')
+        else:
+            stmt_name = ''
+
+        statement = await self._protocol.prepare(
+            stmt_name,
+            query,
+            timeout,
+            record_class=record_class,
+            ignore_custom_codec=ignore_custom_codec,
+        )
+        need_reprepare = False
+        types_with_missing_codecs = statement._init_types()
+        tries = 0
+        while types_with_missing_codecs:
+            settings = self._protocol.get_settings()
+
+            # Introspect newly seen types and populate the
+            # codec cache.
+            types, intro_stmt = await self._introspect_types(
+                types_with_missing_codecs, timeout)
+
+            settings.register_data_types(types)
+
+            # The introspection query has used an anonymous statement,
+            # which has blown away the anonymous statement we've prepared
+            # for the query, so we need to re-prepare it.
+            need_reprepare = not intro_stmt.name and not statement.name
+            types_with_missing_codecs = statement._init_types()
+            tries += 1
+            if tries > 5:
+                # In the vast majority of cases there will be only
+                # one iteration.
In rare cases, there might be a race + # with reload_schema_state(), which would cause a + # second try. More than five is clearly a bug. + raise exceptions.InternalClientError( + 'could not resolve query result and/or argument types ' + 'in {} attempts'.format(tries) + ) + + # Now that types have been resolved, populate the codec pipeline + # for the statement. + statement._init_codecs() + + if need_reprepare: + await self._protocol.prepare( + stmt_name, + query, + timeout, + state=statement, + record_class=record_class, + ) + + if use_cache: + self._stmt_cache.put( + (query, record_class, ignore_custom_codec), statement) + + # If we've just created a new statement object, check if there + # are any statements for GC. + if self._stmts_to_close: + await self._cleanup_stmts() + + return statement + + async def _introspect_types(self, typeoids, timeout): + return await self.__execute( + self._intro_query, + (list(typeoids),), + 0, + timeout, + ignore_custom_codec=True, + ) + + async def _introspect_type(self, typename, schema): + if ( + schema == 'pg_catalog' + and typename.lower() in protocol.BUILTIN_TYPE_NAME_MAP + ): + typeoid = protocol.BUILTIN_TYPE_NAME_MAP[typename.lower()] + rows = await self._execute( + introspection.TYPE_BY_OID, + [typeoid], + limit=0, + timeout=None, + ignore_custom_codec=True, + ) + else: + rows = await self._execute( + introspection.TYPE_BY_NAME, + [typename, schema], + limit=1, + timeout=None, + ignore_custom_codec=True, + ) + + if not rows: + raise ValueError( + 'unknown type: {}.{}'.format(schema, typename)) + + return rows[0] + + def cursor( + self, + query, + *args, + prefetch=None, + timeout=None, + record_class=None + ): + """Return a *cursor factory* for the specified query. + + :param args: + Query arguments. + :param int prefetch: + The number of rows the *cursor iterator* + will prefetch (defaults to ``50``.) + :param float timeout: + Optional timeout in seconds. + :param type record_class: + If specified, the class to use for records returned by this cursor. + Must be a subclass of :class:`~asyncpg.Record`. If not specified, + a per-connection *record_class* is used. + + :return: + A :class:`~cursor.CursorFactory` object. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. + """ + self._check_open() + return cursor.CursorFactory( + self, + query, + None, + args, + prefetch, + timeout, + record_class, + ) + + async def prepare( + self, + query, + *, + name=None, + timeout=None, + record_class=None, + ): + """Create a *prepared statement* for the specified query. + + :param str query: + Text of the query to create a prepared statement for. + :param str name: + Optional name of the returned prepared statement. If not + specified, the name is auto-generated. + :param float timeout: + Optional timeout value in seconds. + :param type record_class: + If specified, the class to use for records returned by the + prepared statement. Must be a subclass of + :class:`~asyncpg.Record`. If not specified, a per-connection + *record_class* is used. + + :return: + A :class:`~prepared_stmt.PreparedStatement` instance. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. + + .. versionchanged:: 0.25.0 + Added the *name* parameter. 
+ """ + return await self._prepare( + query, + name=name, + timeout=timeout, + use_cache=False, + record_class=record_class, + ) + + async def _prepare( + self, + query, + *, + name=None, + timeout=None, + use_cache: bool=False, + record_class=None + ): + self._check_open() + stmt = await self._get_statement( + query, + timeout, + named=True if name is None else name, + use_cache=use_cache, + record_class=record_class, + ) + return prepared_stmt.PreparedStatement(self, query, stmt) + + async def fetch( + self, + query, + *args, + timeout=None, + record_class=None + ) -> list: + """Run a query and return the results as a list of :class:`Record`. + + :param str query: + Query text. + :param args: + Query arguments. + :param float timeout: + Optional timeout value in seconds. + :param type record_class: + If specified, the class to use for records returned by this method. + Must be a subclass of :class:`~asyncpg.Record`. If not specified, + a per-connection *record_class* is used. + + :return list: + A list of :class:`~asyncpg.Record` instances. If specified, the + actual type of list elements would be *record_class*. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. + """ + self._check_open() + return await self._execute( + query, + args, + 0, + timeout, + record_class=record_class, + ) + + async def fetchval(self, query, *args, column=0, timeout=None): + """Run a query and return a value in the first row. + + :param str query: Query text. + :param args: Query arguments. + :param int column: Numeric index within the record of the value to + return (defaults to 0). + :param float timeout: Optional timeout value in seconds. + If not specified, defaults to the value of + ``command_timeout`` argument to the ``Connection`` + instance constructor. + + :return: The value of the specified column of the first record, or + None if no records were returned by the query. + """ + self._check_open() + data = await self._execute(query, args, 1, timeout) + if not data: + return None + return data[0][column] + + async def fetchrow( + self, + query, + *args, + timeout=None, + record_class=None + ): + """Run a query and return the first row. + + :param str query: + Query text + :param args: + Query arguments + :param float timeout: + Optional timeout value in seconds. + :param type record_class: + If specified, the class to use for the value returned by this + method. Must be a subclass of :class:`~asyncpg.Record`. + If not specified, a per-connection *record_class* is used. + + :return: + The first row as a :class:`~asyncpg.Record` instance, or None if + no records were returned by the query. If specified, + *record_class* is used as the type for the result value. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. + """ + self._check_open() + data = await self._execute( + query, + args, + 1, + timeout, + record_class=record_class, + ) + if not data: + return None + return data[0] + + async def copy_from_table(self, table_name, *, output, + columns=None, schema_name=None, timeout=None, + format=None, oids=None, delimiter=None, + null=None, header=None, quote=None, + escape=None, force_quote=None, encoding=None): + """Copy table contents to a file or file-like object. + + :param str table_name: + The name of the table to copy data from. + + :param output: + A :term:`path-like object `, + or a :term:`file-like object `, or + a :term:`coroutine function ` + that takes a ``bytes`` instance as a sole argument. + + :param list columns: + An optional list of column names to copy. 
+ + :param str schema_name: + An optional schema name to qualify the table. + + :param float timeout: + Optional timeout value in seconds. + + The remaining keyword arguments are ``COPY`` statement options, + see `COPY statement documentation`_ for details. + + :return: The status string of the COPY command. + + Example: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> async def run(): + ... con = await asyncpg.connect(user='postgres') + ... result = await con.copy_from_table( + ... 'mytable', columns=('foo', 'bar'), + ... output='file.csv', format='csv') + ... print(result) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + 'COPY 100' + + .. _`COPY statement documentation`: + https://www.postgresql.org/docs/current/static/sql-copy.html + + .. versionadded:: 0.11.0 + """ + tabname = utils._quote_ident(table_name) + if schema_name: + tabname = utils._quote_ident(schema_name) + '.' + tabname + + if columns: + cols = '({})'.format( + ', '.join(utils._quote_ident(c) for c in columns)) + else: + cols = '' + + opts = self._format_copy_opts( + format=format, oids=oids, delimiter=delimiter, + null=null, header=header, quote=quote, escape=escape, + force_quote=force_quote, encoding=encoding + ) + + copy_stmt = 'COPY {tab}{cols} TO STDOUT {opts}'.format( + tab=tabname, cols=cols, opts=opts) + + return await self._copy_out(copy_stmt, output, timeout) + + async def copy_from_query(self, query, *args, output, + timeout=None, format=None, oids=None, + delimiter=None, null=None, header=None, + quote=None, escape=None, force_quote=None, + encoding=None): + """Copy the results of a query to a file or file-like object. + + :param str query: + The query to copy the results of. + + :param args: + Query arguments. + + :param output: + A :term:`path-like object `, + or a :term:`file-like object `, or + a :term:`coroutine function ` + that takes a ``bytes`` instance as a sole argument. + + :param float timeout: + Optional timeout value in seconds. + + The remaining keyword arguments are ``COPY`` statement options, + see `COPY statement documentation`_ for details. + + :return: The status string of the COPY command. + + Example: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> async def run(): + ... con = await asyncpg.connect(user='postgres') + ... result = await con.copy_from_query( + ... 'SELECT foo, bar FROM mytable WHERE foo > $1', 10, + ... output='file.csv', format='csv') + ... print(result) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + 'COPY 10' + + .. _`COPY statement documentation`: + https://www.postgresql.org/docs/current/static/sql-copy.html + + .. versionadded:: 0.11.0 + """ + opts = self._format_copy_opts( + format=format, oids=oids, delimiter=delimiter, + null=null, header=header, quote=quote, escape=escape, + force_quote=force_quote, encoding=encoding + ) + + if args: + query = await utils._mogrify(self, query, args) + + copy_stmt = 'COPY ({query}) TO STDOUT {opts}'.format( + query=query, opts=opts) + + return await self._copy_out(copy_stmt, output, timeout) + + async def copy_to_table(self, table_name, *, source, + columns=None, schema_name=None, timeout=None, + format=None, oids=None, freeze=None, + delimiter=None, null=None, header=None, + quote=None, escape=None, force_quote=None, + force_not_null=None, force_null=None, + encoding=None): + """Copy data to the specified table. + + :param str table_name: + The name of the table to copy data to. 
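# A hedged sketch for the COPY helpers documented above: dump a query to a
# CSV file, then load the file back with copy_to_table(). The file name is
# an assumption, and `mytable` is assumed to already exist with columns
# matching the exported query.
async def copy_demo(con):
    await con.copy_from_query(
        'SELECT g AS id, g * 2 AS doubled FROM generate_series(1, 5) AS g',
        output='rows.csv', format='csv', header=True)
    await con.copy_to_table(
        'mytable', source='rows.csv', format='csv', header=True)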
+ + :param source: + A :term:`path-like object `, + or a :term:`file-like object `, or + an :term:`asynchronous iterable ` + that returns ``bytes``, or an object supporting the + :ref:`buffer protocol `. + + :param list columns: + An optional list of column names to copy. + + :param str schema_name: + An optional schema name to qualify the table. + + :param float timeout: + Optional timeout value in seconds. + + The remaining keyword arguments are ``COPY`` statement options, + see `COPY statement documentation`_ for details. + + :return: The status string of the COPY command. + + Example: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> async def run(): + ... con = await asyncpg.connect(user='postgres') + ... result = await con.copy_to_table( + ... 'mytable', source='datafile.tbl') + ... print(result) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + 'COPY 140000' + + .. _`COPY statement documentation`: + https://www.postgresql.org/docs/current/static/sql-copy.html + + .. versionadded:: 0.11.0 + """ + tabname = utils._quote_ident(table_name) + if schema_name: + tabname = utils._quote_ident(schema_name) + '.' + tabname + + if columns: + cols = '({})'.format( + ', '.join(utils._quote_ident(c) for c in columns)) + else: + cols = '' + + opts = self._format_copy_opts( + format=format, oids=oids, freeze=freeze, delimiter=delimiter, + null=null, header=header, quote=quote, escape=escape, + force_not_null=force_not_null, force_null=force_null, + encoding=encoding + ) + + copy_stmt = 'COPY {tab}{cols} FROM STDIN {opts}'.format( + tab=tabname, cols=cols, opts=opts) + + return await self._copy_in(copy_stmt, source, timeout) + + async def copy_records_to_table(self, table_name, *, records, + columns=None, schema_name=None, + timeout=None): + """Copy a list of records to the specified table using binary COPY. + + :param str table_name: + The name of the table to copy data to. + + :param records: + An iterable returning row tuples to copy into the table. + :term:`Asynchronous iterables ` + are also supported. + + :param list columns: + An optional list of column names to copy. + + :param str schema_name: + An optional schema name to qualify the table. + + :param float timeout: + Optional timeout value in seconds. + + :return: The status string of the COPY command. + + Example: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> async def run(): + ... con = await asyncpg.connect(user='postgres') + ... result = await con.copy_records_to_table( + ... 'mytable', records=[ + ... (1, 'foo', 'bar'), + ... (2, 'ham', 'spam')]) + ... print(result) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + 'COPY 2' + + Asynchronous record iterables are also supported: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> async def run(): + ... con = await asyncpg.connect(user='postgres') + ... async def record_gen(size): + ... for i in range(size): + ... yield (i,) + ... result = await con.copy_records_to_table( + ... 'mytable', records=record_gen(100)) + ... print(result) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + 'COPY 100' + + .. versionadded:: 0.11.0 + + .. versionchanged:: 0.24.0 + The ``records`` argument may be an asynchronous iterable. + """ + tabname = utils._quote_ident(table_name) + if schema_name: + tabname = utils._quote_ident(schema_name) + '.' 
+ tabname + + if columns: + col_list = ', '.join(utils._quote_ident(c) for c in columns) + cols = '({})'.format(col_list) + else: + col_list = '*' + cols = '' + + intro_query = 'SELECT {cols} FROM {tab} LIMIT 1'.format( + tab=tabname, cols=col_list) + + intro_ps = await self._prepare(intro_query, use_cache=True) + + opts = '(FORMAT binary)' + + copy_stmt = 'COPY {tab}{cols} FROM STDIN {opts}'.format( + tab=tabname, cols=cols, opts=opts) + + return await self._protocol.copy_in( + copy_stmt, None, None, records, intro_ps._state, timeout) + + def _format_copy_opts(self, *, format=None, oids=None, freeze=None, + delimiter=None, null=None, header=None, quote=None, + escape=None, force_quote=None, force_not_null=None, + force_null=None, encoding=None): + kwargs = dict(locals()) + kwargs.pop('self') + opts = [] + + if force_quote is not None and isinstance(force_quote, bool): + kwargs.pop('force_quote') + if force_quote: + opts.append('FORCE_QUOTE *') + + for k, v in kwargs.items(): + if v is not None: + if k in ('force_not_null', 'force_null', 'force_quote'): + v = '(' + ', '.join(utils._quote_ident(c) for c in v) + ')' + elif k in ('oids', 'freeze', 'header'): + v = str(v) + else: + v = utils._quote_literal(v) + + opts.append('{} {}'.format(k.upper(), v)) + + if opts: + return '(' + ', '.join(opts) + ')' + else: + return '' + + async def _copy_out(self, copy_stmt, output, timeout): + try: + path = os.fspath(output) + except TypeError: + # output is not a path-like object + path = None + + writer = None + opened_by_us = False + run_in_executor = self._loop.run_in_executor + + if path is not None: + # a path + f = await run_in_executor(None, open, path, 'wb') + opened_by_us = True + elif hasattr(output, 'write'): + # file-like + f = output + elif callable(output): + # assuming calling output returns an awaitable. + writer = output + else: + raise TypeError( + 'output is expected to be a file-like object, ' + 'a path-like object or a coroutine function, ' + 'not {}'.format(type(output).__name__) + ) + + if writer is None: + async def _writer(data): + await run_in_executor(None, f.write, data) + writer = _writer + + try: + return await self._protocol.copy_out(copy_stmt, writer, timeout) + finally: + if opened_by_us: + f.close() + + async def _copy_in(self, copy_stmt, source, timeout): + try: + path = os.fspath(source) + except TypeError: + # source is not a path-like object + path = None + + f = None + reader = None + data = None + opened_by_us = False + run_in_executor = self._loop.run_in_executor + + if path is not None: + # a path + f = await run_in_executor(None, open, path, 'rb') + opened_by_us = True + elif hasattr(source, 'read'): + # file-like + f = source + elif isinstance(source, collections.abc.AsyncIterable): + # assuming calling output returns an awaitable. + # copy_in() is designed to handle very large amounts of data, and + # the source async iterable is allowed to return an arbitrary + # amount of data on every iteration. + reader = source + else: + # assuming source is an instance supporting the buffer protocol. + data = source + + if f is not None: + # Copying from a file-like object. 
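# A hedged sketch: copy_to_table() also accepts an asynchronous iterable of
# bytes as `source`, which _copy_in() above consumes chunk by chunk. The
# table name and layout are assumptions; the chunks use COPY's default
# tab-separated text format.
async def stream_demo(con):
    async def chunks():
        yield b'1\tfoo\n'
        yield b'2\tbar\n'
    await con.copy_to_table('mytable', source=chunks(),
                            columns=('id', 'name'))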
+ class _Reader: + def __aiter__(self): + return self + + async def __anext__(self): + data = await run_in_executor(None, f.read, 524288) + if len(data) == 0: + raise StopAsyncIteration + else: + return data + + reader = _Reader() + + try: + return await self._protocol.copy_in( + copy_stmt, reader, data, None, None, timeout) + finally: + if opened_by_us: + await run_in_executor(None, f.close) + + async def set_type_codec(self, typename, *, + schema='public', encoder, decoder, + format='text'): + """Set an encoder/decoder pair for the specified data type. + + :param typename: + Name of the data type the codec is for. + + :param schema: + Schema name of the data type the codec is for + (defaults to ``'public'``) + + :param format: + The type of the argument received by the *decoder* callback, + and the type of the *encoder* callback return value. + + If *format* is ``'text'`` (the default), the exchange datum is a + ``str`` instance containing valid text representation of the + data type. + + If *format* is ``'binary'``, the exchange datum is a ``bytes`` + instance containing valid _binary_ representation of the + data type. + + If *format* is ``'tuple'``, the exchange datum is a type-specific + ``tuple`` of values. The table below lists supported data + types and their format for this mode. + + +-----------------+---------------------------------------------+ + | Type | Tuple layout | + +=================+=============================================+ + | ``interval`` | (``months``, ``days``, ``microseconds``) | + +-----------------+---------------------------------------------+ + | ``date`` | (``date ordinal relative to Jan 1 2000``,) | + | | ``-2^31`` for negative infinity timestamp | + | | ``2^31-1`` for positive infinity timestamp. | + +-----------------+---------------------------------------------+ + | ``timestamp`` | (``microseconds relative to Jan 1 2000``,) | + | | ``-2^63`` for negative infinity timestamp | + | | ``2^63-1`` for positive infinity timestamp. | + +-----------------+---------------------------------------------+ + | ``timestamp | (``microseconds relative to Jan 1 2000 | + | with time zone``| UTC``,) | + | | ``-2^63`` for negative infinity timestamp | + | | ``2^63-1`` for positive infinity timestamp. | + +-----------------+---------------------------------------------+ + | ``time`` | (``microseconds``,) | + +-----------------+---------------------------------------------+ + | ``time with | (``microseconds``, | + | time zone`` | ``time zone offset in seconds``) | + +-----------------+---------------------------------------------+ + + :param encoder: + Callable accepting a Python object as a single argument and + returning a value encoded according to *format*. + + :param decoder: + Callable accepting a single argument encoded according to *format* + and returning a decoded Python object. + + Example: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> import datetime + >>> from dateutil.relativedelta import relativedelta + >>> async def run(): + ... con = await asyncpg.connect(user='postgres') + ... def encoder(delta): + ... ndelta = delta.normalized() + ... return (ndelta.years * 12 + ndelta.months, + ... ndelta.days, + ... ((ndelta.hours * 3600 + + ... ndelta.minutes * 60 + + ... ndelta.seconds) * 1000000 + + ... ndelta.microseconds)) + ... def decoder(tup): + ... return relativedelta(months=tup[0], days=tup[1], + ... microseconds=tup[2]) + ... await con.set_type_codec( + ... 'interval', schema='pg_catalog', encoder=encoder, + ... 
decoder=decoder, format='tuple') + ... result = await con.fetchval( + ... "SELECT '2 years 3 mons 1 day'::interval") + ... print(result) + ... print(datetime.datetime(2002, 1, 1) + result) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + relativedelta(years=+2, months=+3, days=+1) + 2004-04-02 00:00:00 + + .. versionadded:: 0.12.0 + Added the ``format`` keyword argument and support for 'tuple' + format. + + .. versionchanged:: 0.12.0 + The ``binary`` keyword argument is deprecated in favor of + ``format``. + + .. versionchanged:: 0.13.0 + The ``binary`` keyword argument was removed in favor of + ``format``. + + .. note:: + + It is recommended to use the ``'binary'`` or ``'tuple'`` *format* + whenever possible and if the underlying type supports it. Asyncpg + currently does not support text I/O for composite and range types, + and some other functionality, such as + :meth:`Connection.copy_to_table`, does not support types with text + codecs. + """ + self._check_open() + typeinfo = await self._introspect_type(typename, schema) + if not introspection.is_scalar_type(typeinfo): + raise exceptions.InterfaceError( + 'cannot use custom codec on non-scalar type {}.{}'.format( + schema, typename)) + if introspection.is_domain_type(typeinfo): + raise exceptions.UnsupportedClientFeatureError( + 'custom codecs on domain types are not supported', + hint='Set the codec on the base type.', + detail=( + 'PostgreSQL does not distinguish domains from ' + 'their base types in query results at the protocol level.' + ) + ) + + oid = typeinfo['oid'] + self._protocol.get_settings().add_python_codec( + oid, typename, schema, 'scalar', + encoder, decoder, format) + + # Statement cache is no longer valid due to codec changes. + self._drop_local_statement_cache() + + async def reset_type_codec(self, typename, *, schema='public'): + """Reset *typename* codec to the default implementation. + + :param typename: + Name of the data type the codec is for. + + :param schema: + Schema name of the data type the codec is for + (defaults to ``'public'``) + + .. versionadded:: 0.12.0 + """ + + typeinfo = await self._introspect_type(typename, schema) + self._protocol.get_settings().remove_python_codec( + typeinfo['oid'], typename, schema) + + # Statement cache is no longer valid due to codec changes. + self._drop_local_statement_cache() + + async def set_builtin_type_codec(self, typename, *, + schema='public', codec_name, + format=None): + """Set a builtin codec for the specified scalar data type. + + This method has two uses. The first is to register a builtin + codec for an extension type without a stable OID, such as 'hstore'. + The second use is to declare that an extension type or a + user-defined type is wire-compatible with a certain builtin + data type and should be exchanged as such. + + :param typename: + Name of the data type the codec is for. + + :param schema: + Schema name of the data type the codec is for + (defaults to ``'public'``). + + :param codec_name: + The name of the builtin codec to use for the type. + This should be either the name of a known core type + (such as ``"int"``), or the name of a supported extension + type. Currently, the only supported extension type is + ``"pg_contrib.hstore"``. + + :param format: + If *format* is ``None`` (the default), all formats supported + by the target codec are declared to be supported for *typename*. + If *format* is ``'text'`` or ``'binary'``, then only the + specified format is declared to be supported for *typename*. + + .. 
versionchanged:: 0.18.0 + The *codec_name* argument can be the name of any known + core data type. Added the *format* keyword argument. + """ + self._check_open() + typeinfo = await self._introspect_type(typename, schema) + if not introspection.is_scalar_type(typeinfo): + raise exceptions.InterfaceError( + 'cannot alias non-scalar type {}.{}'.format( + schema, typename)) + + oid = typeinfo['oid'] + + self._protocol.get_settings().set_builtin_type_codec( + oid, typename, schema, 'scalar', codec_name, format) + + # Statement cache is no longer valid due to codec changes. + self._drop_local_statement_cache() + + def is_closed(self): + """Return ``True`` if the connection is closed, ``False`` otherwise. + + :return bool: ``True`` if the connection is closed, ``False`` + otherwise. + """ + return self._aborted or not self._protocol.is_connected() + + async def close(self, *, timeout=None): + """Close the connection gracefully. + + :param float timeout: + Optional timeout value in seconds. + + .. versionchanged:: 0.14.0 + Added the *timeout* parameter. + """ + try: + if not self.is_closed(): + await self._protocol.close(timeout) + except (Exception, asyncio.CancelledError): + # If we fail to close gracefully, abort the connection. + self._abort() + raise + finally: + self._cleanup() + + def terminate(self): + """Terminate the connection without waiting for pending data.""" + if not self.is_closed(): + self._abort() + self._cleanup() + + async def reset(self, *, timeout=None): + self._check_open() + self._listeners.clear() + self._log_listeners.clear() + reset_query = self._get_reset_query() + + if self._protocol.is_in_transaction() or self._top_xact is not None: + if self._top_xact is None or not self._top_xact._managed: + # Managed transactions are guaranteed to __aexit__ + # correctly. + self._loop.call_exception_handler({ + 'message': 'Resetting connection with an ' + 'active transaction {!r}'.format(self) + }) + + self._top_xact = None + reset_query = 'ROLLBACK;\n' + reset_query + + if reset_query: + await self.execute(reset_query, timeout=timeout) + + def _abort(self): + # Put the connection into the aborted state. + self._aborted = True + self._protocol.abort() + self._protocol = None + + def _cleanup(self): + self._call_termination_listeners() + # Free the resources associated with this connection. + # This must be called when a connection is terminated. + + if self._proxy is not None: + # Connection is a member of a pool, so let the pool + # know that this connection is dead. + self._proxy._holder._release_on_close() + + self._mark_stmts_as_closed() + self._listeners.clear() + self._log_listeners.clear() + self._clean_tasks() + + def _clean_tasks(self): + # Wrap-up any remaining tasks associated with this connection. 
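# A hedged lifecycle sketch for close()/terminate() above; the credentials
# are assumptions. close() waits for a graceful shutdown (optionally
# bounded by a timeout), while terminate() drops the transport immediately
# without waiting for pending data.
import asyncio
import asyncpg

async def main():
    con = await asyncpg.connect(user='postgres')
    try:
        print(await con.fetchval('SELECT 1'))
    finally:
        await con.close(timeout=10)

asyncio.run(main())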
+ if self._cancellations: + for fut in self._cancellations: + if not fut.done(): + fut.cancel() + self._cancellations.clear() + + def _check_open(self): + if self.is_closed(): + raise exceptions.InterfaceError('connection is closed') + + def _get_unique_id(self, prefix): + global _uid + _uid += 1 + return '__asyncpg_{}_{:x}__'.format(prefix, _uid) + + def _mark_stmts_as_closed(self): + for stmt in self._stmt_cache.iter_statements(): + stmt.mark_closed() + + for stmt in self._stmts_to_close: + stmt.mark_closed() + + self._stmt_cache.clear() + self._stmts_to_close.clear() + + def _maybe_gc_stmt(self, stmt): + if ( + stmt.refs == 0 + and not self._stmt_cache.has( + (stmt.query, stmt.record_class, stmt.ignore_custom_codec) + ) + ): + # If low-level `stmt` isn't referenced from any high-level + # `PreparedStatement` object and is not in the `_stmt_cache`: + # + # * mark it as closed, which will make it non-usable + # for any `PreparedStatement` or for methods like + # `Connection.fetch()`. + # + # * schedule it to be formally closed on the server. + stmt.mark_closed() + self._stmts_to_close.add(stmt) + + async def _cleanup_stmts(self): + # Called whenever we create a new prepared statement in + # `Connection._get_statement()` and `_stmts_to_close` is + # not empty. + to_close = self._stmts_to_close + self._stmts_to_close = set() + for stmt in to_close: + # It is imperative that statements are cleaned properly, + # so we ignore the timeout. + await self._protocol.close_statement(stmt, protocol.NO_TIMEOUT) + + async def _cancel(self, waiter): + try: + # Open new connection to the server + await connect_utils._cancel( + loop=self._loop, addr=self._addr, params=self._params, + backend_pid=self._protocol.backend_pid, + backend_secret=self._protocol.backend_secret) + except ConnectionResetError as ex: + # On some systems Postgres will reset the connection + # after processing the cancellation command. + if not waiter.done(): + waiter.set_exception(ex) + except asyncio.CancelledError: + # There are two scenarios in which the cancellation + # itself will be cancelled: 1) the connection is being closed, + # 2) the event loop is being shut down. + # In either case we do not care about the propagation of + # the CancelledError, and don't want the loop to warn about + # an unretrieved exception. 
+ pass + except (Exception, asyncio.CancelledError) as ex: + if not waiter.done(): + waiter.set_exception(ex) + finally: + self._cancellations.discard( + asyncio.current_task(self._loop)) + if not waiter.done(): + waiter.set_result(None) + + def _cancel_current_command(self, waiter): + self._cancellations.add(self._loop.create_task(self._cancel(waiter))) + + def _process_log_message(self, fields, last_query): + if not self._log_listeners: + return + + message = exceptions.PostgresLogMessage.new(fields, query=last_query) + + con_ref = self._unwrap() + for cb in self._log_listeners: + if cb.is_async: + self._loop.create_task(cb.cb(con_ref, message)) + else: + self._loop.call_soon(cb.cb, con_ref, message) + + def _call_termination_listeners(self): + if not self._termination_listeners: + return + + con_ref = self._unwrap() + for cb in self._termination_listeners: + if cb.is_async: + self._loop.create_task(cb.cb(con_ref)) + else: + self._loop.call_soon(cb.cb, con_ref) + + self._termination_listeners.clear() + + def _process_notification(self, pid, channel, payload): + if channel not in self._listeners: + return + + con_ref = self._unwrap() + for cb in self._listeners[channel]: + if cb.is_async: + self._loop.create_task(cb.cb(con_ref, pid, channel, payload)) + else: + self._loop.call_soon(cb.cb, con_ref, pid, channel, payload) + + def _unwrap(self): + if self._proxy is None: + con_ref = self + else: + # `_proxy` is not None when the connection is a member + # of a connection pool. Which means that the user is working + # with a `PoolConnectionProxy` instance, and expects to see it + # (and not the actual Connection) in their event callbacks. + con_ref = self._proxy + return con_ref + + def _get_reset_query(self): + if self._reset_query is not None: + return self._reset_query + + caps = self._server_caps + + _reset_query = [] + if caps.advisory_locks: + _reset_query.append('SELECT pg_advisory_unlock_all();') + if caps.sql_close_all: + _reset_query.append('CLOSE ALL;') + if caps.notifications and caps.plpgsql: + _reset_query.append('UNLISTEN *;') + if caps.sql_reset: + _reset_query.append('RESET ALL;') + + _reset_query = '\n'.join(_reset_query) + self._reset_query = _reset_query + + return _reset_query + + def _set_proxy(self, proxy): + if self._proxy is not None and proxy is not None: + # Should not happen unless there is a bug in `Pool`. + raise exceptions.InterfaceError( + 'internal asyncpg error: connection is already proxied') + + self._proxy = proxy + + def _check_listeners(self, listeners, listener_type): + if listeners: + count = len(listeners) + + w = exceptions.InterfaceWarning( + '{conn!r} is being released to the pool but has {c} active ' + '{type} listener{s}'.format( + conn=self, c=count, type=listener_type, + s='s' if count > 1 else '')) + + warnings.warn(w) + + def _on_release(self, stacklevel=1): + # Invalidate external references to the connection. + self._pool_release_ctr += 1 + # Called when the connection is about to be released to the pool. + # Let's check that the user has not left any listeners on it. + self._check_listeners( + list(itertools.chain.from_iterable(self._listeners.values())), + 'notification') + self._check_listeners( + self._log_listeners, 'log') + + def _drop_local_statement_cache(self): + self._stmt_cache.clear() + + def _drop_global_statement_cache(self): + if self._proxy is not None: + # This connection is a member of a pool, so we delegate + # the cache drop to the pool. 
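# A hedged sketch for the log-listener dispatch above: a NOTICE raised by
# the server arrives as a NoticeResponse and is handed to every callback
# registered with add_log_listener(). Assumes an open Connection `con`.
async def notice_demo(con):
    def on_log(connection, message):
        print('server message:', message)

    con.add_log_listener(on_log)
    await con.execute("DO $$ BEGIN RAISE NOTICE 'hello'; END $$;")
    con.remove_log_listener(on_log)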
+ pool = self._proxy._holder._pool + pool._drop_statement_cache() + else: + self._drop_local_statement_cache() + + def _drop_local_type_cache(self): + self._protocol.get_settings().clear_type_cache() + + def _drop_global_type_cache(self): + if self._proxy is not None: + # This connection is a member of a pool, so we delegate + # the cache drop to the pool. + pool = self._proxy._holder._pool + pool._drop_type_cache() + else: + self._drop_local_type_cache() + + async def reload_schema_state(self): + """Indicate that the database schema information must be reloaded. + + For performance reasons, asyncpg caches certain aspects of the + database schema, such as the layout of composite types. Consequently, + when the database schema changes, and asyncpg is not able to + gracefully recover from an error caused by outdated schema + assumptions, an :exc:`~asyncpg.exceptions.OutdatedSchemaCacheError` + is raised. To prevent the exception, this method may be used to inform + asyncpg that the database schema has changed. + + Example: + + .. code-block:: pycon + + >>> import asyncpg + >>> import asyncio + >>> async def change_type(con): + ... result = await con.fetch('SELECT id, info FROM tbl') + ... # Change composite's attribute type "int"=>"text" + ... await con.execute('ALTER TYPE custom DROP ATTRIBUTE y') + ... await con.execute('ALTER TYPE custom ADD ATTRIBUTE y text') + ... await con.reload_schema_state() + ... for id_, info in result: + ... new = (info['x'], str(info['y'])) + ... await con.execute( + ... 'UPDATE tbl SET info=$2 WHERE id=$1', id_, new) + ... + >>> async def run(): + ... # Initial schema: + ... # CREATE TYPE custom AS (x int, y int); + ... # CREATE TABLE tbl(id int, info custom); + ... con = await asyncpg.connect(user='postgres') + ... async with con.transaction(): + ... # Prevent concurrent changes in the table + ... await con.execute('LOCK TABLE tbl') + ... await change_type(con) + ... + >>> asyncio.get_event_loop().run_until_complete(run()) + + .. 
versionadded:: 0.14.0 + """ + self._drop_global_type_cache() + self._drop_global_statement_cache() + + async def _execute( + self, + query, + args, + limit, + timeout, + *, + return_status=False, + ignore_custom_codec=False, + record_class=None + ): + with self._stmt_exclusive_section: + result, _ = await self.__execute( + query, + args, + limit, + timeout, + return_status=return_status, + record_class=record_class, + ignore_custom_codec=ignore_custom_codec, + ) + return result + + async def __execute( + self, + query, + args, + limit, + timeout, + *, + return_status=False, + ignore_custom_codec=False, + record_class=None + ): + executor = lambda stmt, timeout: self._protocol.bind_execute( + stmt, args, '', limit, return_status, timeout) + timeout = self._protocol._get_timeout(timeout) + return await self._do_execute( + query, + executor, + timeout, + record_class=record_class, + ignore_custom_codec=ignore_custom_codec, + ) + + async def _executemany(self, query, args, timeout): + executor = lambda stmt, timeout: self._protocol.bind_execute_many( + stmt, args, '', timeout) + timeout = self._protocol._get_timeout(timeout) + with self._stmt_exclusive_section: + result, _ = await self._do_execute(query, executor, timeout) + return result + + async def _do_execute( + self, + query, + executor, + timeout, + retry=True, + *, + ignore_custom_codec=False, + record_class=None + ): + if timeout is None: + stmt = await self._get_statement( + query, + None, + record_class=record_class, + ignore_custom_codec=ignore_custom_codec, + ) + else: + before = time.monotonic() + stmt = await self._get_statement( + query, + timeout, + record_class=record_class, + ignore_custom_codec=ignore_custom_codec, + ) + after = time.monotonic() + timeout -= after - before + before = after + + try: + if timeout is None: + result = await executor(stmt, None) + else: + try: + result = await executor(stmt, timeout) + finally: + after = time.monotonic() + timeout -= after - before + + except exceptions.OutdatedSchemaCacheError: + # This exception is raised when we detect a difference between + # cached type's info and incoming tuple from the DB (when a type is + # changed by the ALTER TYPE). + # It is not possible to recover (the statement is already done at + # the server's side), the only way is to drop our caches and + # reraise the exception to the caller. + await self.reload_schema_state() + raise + except exceptions.InvalidCachedStatementError: + # PostgreSQL will raise an exception when it detects + # that the result type of the query has changed from + # when the statement was prepared. This may happen, + # for example, after an ALTER TABLE or SET search_path. + # + # When this happens, and there is no transaction running, + # we can simply re-prepare the statement and try once + # again. We deliberately retry only once as this is + # supposed to be a rare occurrence. + # + # If the transaction _is_ running, this error will put it + # into an error state, and we have no choice but to + # re-raise the exception. + # + # In either case we clear the statement cache for this + # connection and all other connections of the pool this + # connection belongs to (if any). + # + # See https://github.com/MagicStack/asyncpg/issues/72 + # and https://github.com/MagicStack/asyncpg/issues/76 + # for discussion. 
+            #
+            self._drop_global_statement_cache()
+            if self._protocol.is_in_transaction() or not retry:
+                raise
+            else:
+                return await self._do_execute(
+                    query, executor, timeout, retry=False)
+
+        return result, stmt
+
+
+async def connect(dsn=None, *,
+                  host=None, port=None,
+                  user=None, password=None, passfile=None,
+                  database=None,
+                  loop=None,
+                  timeout=60,
+                  statement_cache_size=100,
+                  max_cached_statement_lifetime=300,
+                  max_cacheable_statement_size=1024 * 15,
+                  command_timeout=None,
+                  ssl=None,
+                  direct_tls=False,
+                  connection_class=Connection,
+                  record_class=protocol.Record,
+                  server_settings=None):
+    r"""A coroutine to establish a connection to a PostgreSQL server.
+
+    The connection parameters may be specified either as a connection
+    URI in *dsn*, or as specific keyword arguments, or both.
+    If both *dsn* and keyword arguments are specified, the latter
+    override the corresponding values parsed from the connection URI.
+    The default values for the majority of arguments can be specified
+    using `environment variables `_.
+
+    Returns a new :class:`~asyncpg.connection.Connection` object.
+
+    :param dsn:
+        Connection arguments specified as a single string in the
+        `libpq connection URI format`_:
+        ``postgres://user:password@host:port/database?option=value``.
+        The following options are recognized by asyncpg: ``host``,
+        ``port``, ``user``, ``database`` (or ``dbname``), ``password``,
+        ``passfile``, ``sslmode``, ``sslcert``, ``sslkey``, ``sslrootcert``,
+        and ``sslcrl``. Unlike libpq, asyncpg will treat unrecognized
+        options as `server settings`_ to be used for the connection.
+
+        .. note::
+
+           The URI must be *valid*, which means that all components must
+           be properly quoted with :py:func:`urllib.parse.quote`, and
+           any literal IPv6 addresses must be enclosed in square brackets.
+           For example:
+
+           .. code-block:: text
+
+              postgres://dbuser@[fe80::1ff:fe23:4567:890a%25eth0]/dbname
+
+    :param host:
+        Database host address as one of the following:
+
+        - an IP address or a domain name;
+        - an absolute path to the directory containing the database
+          server Unix-domain socket (not supported on Windows);
+        - a sequence of any of the above, in which case the addresses
+          will be tried in order, and the first successful connection
+          will be returned.
+
+        If not specified, asyncpg will try the following, in order:
+
+        - host address(es) parsed from the *dsn* argument,
+        - the value of the ``PGHOST`` environment variable,
+        - on Unix, common directories used for PostgreSQL Unix-domain
+          sockets: ``"/run/postgresql"``, ``"/var/run/postgresql"``,
+          ``"/var/pgsql_socket"``, ``"/private/tmp"``, and ``"/tmp"``,
+        - ``"localhost"``.
+
+    :param port:
+        Port number to connect to at the server host
+        (or Unix-domain socket file extension). If multiple host
+        addresses were specified, this parameter may specify a
+        sequence of port numbers of the same length as the host sequence,
+        or it may specify a single port number to be used for all host
+        addresses.
+
+        If not specified, the value parsed from the *dsn* argument is used,
+        or the value of the ``PGPORT`` environment variable, or ``5432`` if
+        neither is specified.
+
+    :param user:
+        The name of the database role used for authentication.
+
+        If not specified, the value parsed from the *dsn* argument is used,
+        or the value of the ``PGUSER`` environment variable, or the
+        operating system name of the user running the application.
+
+    :param database:
+        The name of the database to connect to.
+
+        If not specified, the value parsed from the *dsn* argument is used,
+        or the value of the ``PGDATABASE`` environment variable, or the
+        computed value of the *user* argument.
+
+    :param password:
+        Password to be used for authentication, if the server requires
+        one.  If not specified, the value parsed from the *dsn* argument
+        is used, or the value of the ``PGPASSWORD`` environment variable.
+        Note that the use of the environment variable is discouraged as
+        other users and applications may be able to read it without needing
+        specific privileges.  It is recommended to use *passfile* instead.
+
+        Password may be either a string, or a callable that returns a
+        string.  If a callable is provided, it will be called each time a
+        new connection is established.
+
+    :param passfile:
+        The name of the file used to store passwords
+        (defaults to ``~/.pgpass``, or ``%APPDATA%\postgresql\pgpass.conf``
+        on Windows).
+
+    :param loop:
+        An asyncio event loop instance.  If ``None``, the default
+        event loop will be used.
+
+    :param float timeout:
+        Connection timeout in seconds.
+
+    :param int statement_cache_size:
+        The size of prepared statement LRU cache.  Pass ``0`` to
+        disable the cache.
+
+    :param int max_cached_statement_lifetime:
+        The maximum time in seconds a prepared statement will stay
+        in the cache.  Pass ``0`` to allow statements to be cached
+        indefinitely.
+
+    :param int max_cacheable_statement_size:
+        The maximum size of a statement that can be cached (15KiB by
+        default).  Pass ``0`` to allow all statements to be cached
+        regardless of their size.
+
+    :param float command_timeout:
+        The default timeout for operations on this connection
+        (the default is ``None``: no timeout).
+
+    :param ssl:
+        Pass ``True`` or an `ssl.SSLContext
+        <https://docs.python.org/3/library/ssl.html#ssl.SSLContext>`_
+        instance to require an SSL connection.  If ``True``, a default
+        SSL context returned by `ssl.create_default_context()
+        <https://docs.python.org/3/library/ssl.html#ssl.create_default_context>`_
+        will be used.  The value can also be one of the following strings:
+
+        - ``'disable'`` - SSL is disabled (equivalent to ``False``)
+        - ``'prefer'`` - try SSL first, fallback to non-SSL connection
+          if SSL connection fails
+        - ``'allow'`` - try without SSL first, then retry with SSL if the
+          first attempt fails
+        - ``'require'`` - only try an SSL connection.  Certificate
+          verification errors are ignored
+        - ``'verify-ca'`` - only try an SSL connection, and verify
+          that the server certificate is issued by a trusted certificate
+          authority (CA)
+        - ``'verify-full'`` - only try an SSL connection, verify
+          that the server certificate is issued by a trusted CA and
+          that the requested server host name matches that in the
+          certificate
+
+        The default is ``'prefer'``: try an SSL connection and fallback to
+        non-SSL connection if that fails.
+
+        .. note::
+
+           *ssl* is ignored for Unix domain socket communication.
+
+        Example of programmatic SSL context configuration that is equivalent
+        to ``sslmode=verify-full&sslcert=..&sslkey=..&sslrootcert=..``:
+
+        .. code-block:: pycon
+
+            >>> import asyncpg
+            >>> import asyncio
+            >>> import ssl
+            >>> async def main():
+            ...     # Load CA bundle for server certificate verification,
+            ...     # equivalent to sslrootcert= in DSN.
+            ...     sslctx = ssl.create_default_context(
+            ...         ssl.Purpose.SERVER_AUTH,
+            ...         cafile="path/to/ca_bundle.pem")
+            ...     # If True, equivalent to sslmode=verify-full, if False:
+            ...     # sslmode=verify-ca.
+            ...     sslctx.check_hostname = True
+            ...     # Load client certificate and private key for client
+            ...     # authentication, equivalent to sslcert= and sslkey= in
+            ...     # DSN.
+            ...     sslctx.load_cert_chain(
+            ...         "path/to/client.cert",
+            ...         keyfile="path/to/client.key",
+            ...     )
+            ...     con = await asyncpg.connect(user='postgres', ssl=sslctx)
+            ...     await con.close()
+            >>> asyncio.run(main())
+
+        Example of programmatic SSL context configuration that is equivalent
+        to ``sslmode=require`` (no server certificate or host verification):
+
+        .. code-block:: pycon
+
+            >>> import asyncpg
+            >>> import asyncio
+            >>> import ssl
+            >>> async def main():
+            ...     sslctx = ssl.create_default_context(
+            ...         ssl.Purpose.SERVER_AUTH)
+            ...     sslctx.check_hostname = False
+            ...     sslctx.verify_mode = ssl.CERT_NONE
+            ...     con = await asyncpg.connect(user='postgres', ssl=sslctx)
+            ...     await con.close()
+            >>> asyncio.run(main())
+
+    :param bool direct_tls:
+        Pass ``True`` to skip PostgreSQL STARTTLS mode and perform a direct
+        SSL connection.  Must be used alongside ``ssl`` param.
+
+    :param dict server_settings:
+        An optional dict of server runtime parameters.  Refer to
+        PostgreSQL documentation for a `list of supported options
+        <https://www.postgresql.org/docs/current/runtime-config.html>`_.
+
+    :param type connection_class:
+        Class of the returned connection object.  Must be a subclass of
+        :class:`~asyncpg.connection.Connection`.
+
+    :param type record_class:
+        If specified, the class to use for records returned by queries on
+        this connection object.  Must be a subclass of
+        :class:`~asyncpg.Record`.
+
+    :return: A :class:`~asyncpg.connection.Connection` instance.
+
+    Example:
+
+    .. code-block:: pycon
+
+        >>> import asyncpg
+        >>> import asyncio
+        >>> async def run():
+        ...     con = await asyncpg.connect(user='postgres')
+        ...     types = await con.fetch('SELECT * FROM pg_type')
+        ...     print(types)
+        ...
+        >>> asyncio.get_event_loop().run_until_complete(run())
+        [<Record typname='bool' typnamespace=11 ...>, ...]
+
+    def set_max_size(self, new_size):
+        assert new_size >= 0
+        self._max_size = new_size
+        self._maybe_cleanup()
+
+    def get_max_lifetime(self):
+        return self._max_lifetime
+
+    def set_max_lifetime(self, new_lifetime):
+        assert new_lifetime >= 0
+        self._max_lifetime = new_lifetime
+        for entry in self._entries.values():
+            # For every entry cancel the existing callback
+            # and setup a new one if necessary.
+            self._set_entry_timeout(entry)
+
+    def get(self, query, *, promote=True):
+        if not self._max_size:
+            # The cache is disabled.
+            return
+
+        entry = self._entries.get(query)  # type: _StatementCacheEntry
+        if entry is None:
+            return
+
+        if entry._statement.closed:
+            # Happens in unittests when we call `stmt._state.mark_closed()`
+            # manually or when a prepared statement closes itself on type
+            # cache error.
+            self._entries.pop(query)
+            self._clear_entry_callback(entry)
+            return
+
+        if promote:
+            # `promote` is `False` when `get()` is called by `has()`.
+            self._entries.move_to_end(query, last=True)
+
+        return entry._statement
+
+    def has(self, query):
+        return self.get(query, promote=False) is not None
+
+    def put(self, query, statement):
+        if not self._max_size:
+            # The cache is disabled.
+            return
+
+        self._entries[query] = self._new_entry(query, statement)
+
+        # Check if the cache is bigger than max_size and trim it
+        # if necessary.
+        self._maybe_cleanup()
+
+    def iter_statements(self):
+        return (e._statement for e in self._entries.values())
+
+    def clear(self):
+        # Store entries for later.
+        entries = tuple(self._entries.values())
+
+        # Clear the entries dict.
+        self._entries.clear()
+
+        # Make sure that we cancel all scheduled callbacks
+        # and call on_remove callback for each entry.
+        for entry in entries:
+            self._clear_entry_callback(entry)
+            self._on_remove(entry._statement)
+
+    def _set_entry_timeout(self, entry):
+        # Clear the existing timeout.
+ self._clear_entry_callback(entry) + + # Set the new timeout if it's not 0. + if self._max_lifetime: + entry._cleanup_cb = self._loop.call_later( + self._max_lifetime, self._on_entry_expired, entry) + + def _new_entry(self, query, statement): + entry = _StatementCacheEntry(self, query, statement) + self._set_entry_timeout(entry) + return entry + + def _on_entry_expired(self, entry): + # `call_later` callback, called when an entry stayed longer + # than `self._max_lifetime`. + if self._entries.get(entry._query) is entry: + self._entries.pop(entry._query) + self._on_remove(entry._statement) + + def _clear_entry_callback(self, entry): + if entry._cleanup_cb is not None: + entry._cleanup_cb.cancel() + + def _maybe_cleanup(self): + # Delete cache entries until the size of the cache is `max_size`. + while len(self._entries) > self._max_size: + old_query, old_entry = self._entries.popitem(last=False) + self._clear_entry_callback(old_entry) + + # Let the connection know that the statement was removed + # from the cache. + self._on_remove(old_entry._statement) + + +class _Callback(typing.NamedTuple): + + cb: typing.Callable[..., None] + is_async: bool + + @classmethod + def from_callable(cls, cb: typing.Callable[..., None]) -> '_Callback': + if inspect.iscoroutinefunction(cb): + is_async = True + elif callable(cb): + is_async = False + else: + raise exceptions.InterfaceError( + 'expected a callable or an `async def` function,' + 'got {!r}'.format(cb) + ) + + return cls(cb, is_async) + + +class _Atomic: + __slots__ = ('_acquired',) + + def __init__(self): + self._acquired = 0 + + def __enter__(self): + if self._acquired: + raise exceptions.InterfaceError( + 'cannot perform operation: another operation is in progress') + self._acquired = 1 + + def __exit__(self, t, e, tb): + self._acquired = 0 + + +class _ConnectionProxy: + # Base class to enable `isinstance(Connection)` check. + __slots__ = () + + +ServerCapabilities = collections.namedtuple( + 'ServerCapabilities', + ['advisory_locks', 'notifications', 'plpgsql', 'sql_reset', + 'sql_close_all']) +ServerCapabilities.__doc__ = 'PostgreSQL server capabilities.' + + +def _detect_server_capabilities(server_version, connection_settings): + if hasattr(connection_settings, 'padb_revision'): + # Amazon Redshift detected. + advisory_locks = False + notifications = False + plpgsql = False + sql_reset = True + sql_close_all = False + elif hasattr(connection_settings, 'crdb_version'): + # CockroachDB detected. + advisory_locks = False + notifications = False + plpgsql = False + sql_reset = False + sql_close_all = False + elif hasattr(connection_settings, 'crate_version'): + # CrateDB detected. + advisory_locks = False + notifications = False + plpgsql = False + sql_reset = False + sql_close_all = False + else: + # Standard PostgreSQL server assumed. + advisory_locks = True + notifications = True + plpgsql = True + sql_reset = True + sql_close_all = True + + return ServerCapabilities( + advisory_locks=advisory_locks, + notifications=notifications, + plpgsql=plpgsql, + sql_reset=sql_reset, + sql_close_all=sql_close_all + ) + + +def _extract_stack(limit=10): + """Replacement for traceback.extract_stack() that only does the + necessary work for asyncio debug mode. 
+ """ + frame = sys._getframe().f_back + try: + stack = traceback.StackSummary.extract( + traceback.walk_stack(frame), lookup_lines=False) + finally: + del frame + + apg_path = asyncpg.__path__[0] + i = 0 + while i < len(stack) and stack[i][0].startswith(apg_path): + i += 1 + stack = stack[i:i + limit] + + stack.reverse() + return ''.join(traceback.format_list(stack)) + + +def _check_record_class(record_class): + if record_class is protocol.Record: + pass + elif ( + isinstance(record_class, type) + and issubclass(record_class, protocol.Record) + ): + if ( + record_class.__new__ is not object.__new__ + or record_class.__init__ is not object.__init__ + ): + raise exceptions.InterfaceError( + 'record_class must not redefine __new__ or __init__' + ) + else: + raise exceptions.InterfaceError( + 'record_class is expected to be a subclass of ' + 'asyncpg.Record, got {!r}'.format(record_class) + ) + + +def _weak_maybe_gc_stmt(weak_ref, stmt): + self = weak_ref() + if self is not None: + self._maybe_gc_stmt(stmt) + + +_uid = 0 diff --git a/sbsheriff/Lib/site-packages/asyncpg/connresource.py b/sbsheriff/Lib/site-packages/asyncpg/connresource.py new file mode 100644 index 0000000..3b0c1d3 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/connresource.py @@ -0,0 +1,44 @@ + +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import functools + +from . import exceptions + + +def guarded(meth): + """A decorator to add a sanity check to ConnectionResource methods.""" + + @functools.wraps(meth) + def _check(self, *args, **kwargs): + self._check_conn_validity(meth.__name__) + return meth(self, *args, **kwargs) + + return _check + + +class ConnectionResource: + __slots__ = ('_connection', '_con_release_ctr') + + def __init__(self, connection): + self._connection = connection + self._con_release_ctr = connection._pool_release_ctr + + def _check_conn_validity(self, meth_name): + con_release_ctr = self._connection._pool_release_ctr + if con_release_ctr != self._con_release_ctr: + raise exceptions.InterfaceError( + 'cannot call {}.{}(): ' + 'the underlying connection has been released back ' + 'to the pool'.format(self.__class__.__name__, meth_name)) + + if self._connection.is_closed(): + raise exceptions.InterfaceError( + 'cannot call {}.{}(): ' + 'the underlying connection is closed'.format( + self.__class__.__name__, meth_name)) diff --git a/sbsheriff/Lib/site-packages/asyncpg/cursor.py b/sbsheriff/Lib/site-packages/asyncpg/cursor.py new file mode 100644 index 0000000..7ec159b --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/cursor.py @@ -0,0 +1,309 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import collections + +from . import connresource +from . import exceptions + + +class CursorFactory(connresource.ConnectionResource): + """A cursor interface for the results of a query. + + A cursor interface can be used to initiate efficient traversal of the + results of a large query. 
+ """ + + __slots__ = ( + '_state', + '_args', + '_prefetch', + '_query', + '_timeout', + '_record_class', + ) + + def __init__( + self, + connection, + query, + state, + args, + prefetch, + timeout, + record_class + ): + super().__init__(connection) + self._args = args + self._prefetch = prefetch + self._query = query + self._timeout = timeout + self._state = state + self._record_class = record_class + if state is not None: + state.attach() + + @connresource.guarded + def __aiter__(self): + prefetch = 50 if self._prefetch is None else self._prefetch + return CursorIterator( + self._connection, + self._query, + self._state, + self._args, + self._record_class, + prefetch, + self._timeout, + ) + + @connresource.guarded + def __await__(self): + if self._prefetch is not None: + raise exceptions.InterfaceError( + 'prefetch argument can only be specified for iterable cursor') + cursor = Cursor( + self._connection, + self._query, + self._state, + self._args, + self._record_class, + ) + return cursor._init(self._timeout).__await__() + + def __del__(self): + if self._state is not None: + self._state.detach() + self._connection._maybe_gc_stmt(self._state) + + +class BaseCursor(connresource.ConnectionResource): + + __slots__ = ( + '_state', + '_args', + '_portal_name', + '_exhausted', + '_query', + '_record_class', + ) + + def __init__(self, connection, query, state, args, record_class): + super().__init__(connection) + self._args = args + self._state = state + if state is not None: + state.attach() + self._portal_name = None + self._exhausted = False + self._query = query + self._record_class = record_class + + def _check_ready(self): + if self._state is None: + raise exceptions.InterfaceError( + 'cursor: no associated prepared statement') + + if self._state.closed: + raise exceptions.InterfaceError( + 'cursor: the prepared statement is closed') + + if not self._connection._top_xact: + raise exceptions.NoActiveSQLTransactionError( + 'cursor cannot be created outside of a transaction') + + async def _bind_exec(self, n, timeout): + self._check_ready() + + if self._portal_name: + raise exceptions.InterfaceError( + 'cursor already has an open portal') + + con = self._connection + protocol = con._protocol + + self._portal_name = con._get_unique_id('portal') + buffer, _, self._exhausted = await protocol.bind_execute( + self._state, self._args, self._portal_name, n, True, timeout) + return buffer + + async def _bind(self, timeout): + self._check_ready() + + if self._portal_name: + raise exceptions.InterfaceError( + 'cursor already has an open portal') + + con = self._connection + protocol = con._protocol + + self._portal_name = con._get_unique_id('portal') + buffer = await protocol.bind(self._state, self._args, + self._portal_name, + timeout) + return buffer + + async def _exec(self, n, timeout): + self._check_ready() + + if not self._portal_name: + raise exceptions.InterfaceError( + 'cursor does not have an open portal') + + protocol = self._connection._protocol + buffer, _, self._exhausted = await protocol.execute( + self._state, self._portal_name, n, True, timeout) + return buffer + + def __repr__(self): + attrs = [] + if self._exhausted: + attrs.append('exhausted') + attrs.append('') # to separate from id + + if self.__class__.__module__.startswith('asyncpg.'): + mod = 'asyncpg' + else: + mod = self.__class__.__module__ + + return '<{}.{} "{!s:.30}" {}{:#x}>'.format( + mod, self.__class__.__name__, + self._state.query, + ' '.join(attrs), id(self)) + + def __del__(self): + if self._state is not None: + 
self._state.detach() + self._connection._maybe_gc_stmt(self._state) + + +class CursorIterator(BaseCursor): + + __slots__ = ('_buffer', '_prefetch', '_timeout') + + def __init__( + self, + connection, + query, + state, + args, + record_class, + prefetch, + timeout + ): + super().__init__(connection, query, state, args, record_class) + + if prefetch <= 0: + raise exceptions.InterfaceError( + 'prefetch argument must be greater than zero') + + self._buffer = collections.deque() + self._prefetch = prefetch + self._timeout = timeout + + @connresource.guarded + def __aiter__(self): + return self + + @connresource.guarded + async def __anext__(self): + if self._state is None: + self._state = await self._connection._get_statement( + self._query, + self._timeout, + named=True, + record_class=self._record_class, + ) + self._state.attach() + + if not self._portal_name: + buffer = await self._bind_exec(self._prefetch, self._timeout) + self._buffer.extend(buffer) + + if not self._buffer and not self._exhausted: + buffer = await self._exec(self._prefetch, self._timeout) + self._buffer.extend(buffer) + + if self._buffer: + return self._buffer.popleft() + + raise StopAsyncIteration + + +class Cursor(BaseCursor): + """An open *portal* into the results of a query.""" + + __slots__ = () + + async def _init(self, timeout): + if self._state is None: + self._state = await self._connection._get_statement( + self._query, + timeout, + named=True, + record_class=self._record_class, + ) + self._state.attach() + self._check_ready() + await self._bind(timeout) + return self + + @connresource.guarded + async def fetch(self, n, *, timeout=None): + r"""Return the next *n* rows as a list of :class:`Record` objects. + + :param float timeout: Optional timeout value in seconds. + + :return: A list of :class:`Record` instances. + """ + self._check_ready() + if n <= 0: + raise exceptions.InterfaceError('n must be greater than zero') + if self._exhausted: + return [] + recs = await self._exec(n, timeout) + if len(recs) < n: + self._exhausted = True + return recs + + @connresource.guarded + async def fetchrow(self, *, timeout=None): + r"""Return the next row. + + :param float timeout: Optional timeout value in seconds. + + :return: A :class:`Record` instance. + """ + self._check_ready() + if self._exhausted: + return None + recs = await self._exec(1, timeout) + if len(recs) < 1: + self._exhausted = True + return None + return recs[0] + + @connresource.guarded + async def forward(self, n, *, timeout=None) -> int: + r"""Skip over the next *n* rows. + + :param float timeout: Optional timeout value in seconds. + + :return: A number of rows actually skipped over (<= *n*). + """ + self._check_ready() + if n <= 0: + raise exceptions.InterfaceError('n must be greater than zero') + + protocol = self._connection._protocol + status = await protocol.query('MOVE FORWARD {:d} {}'.format( + n, self._portal_name), timeout) + + advanced = int(status.split()[1]) + if advanced < n: + self._exhausted = True + + return advanced diff --git a/sbsheriff/Lib/site-packages/asyncpg/exceptions/__init__.py b/sbsheriff/Lib/site-packages/asyncpg/exceptions/__init__.py new file mode 100644 index 0000000..c5b5ccc --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/exceptions/__init__.py @@ -0,0 +1,1193 @@ +# GENERATED FROM postgresql/src/backend/utils/errcodes.txt +# DO NOT MODIFY, use tools/generate_exceptions.py to update + +from ._base import * # NOQA +from . 
import _base + + +class PostgresWarning(_base.PostgresLogMessage, Warning): + sqlstate = '01000' + + +class DynamicResultSetsReturned(PostgresWarning): + sqlstate = '0100C' + + +class ImplicitZeroBitPadding(PostgresWarning): + sqlstate = '01008' + + +class NullValueEliminatedInSetFunction(PostgresWarning): + sqlstate = '01003' + + +class PrivilegeNotGranted(PostgresWarning): + sqlstate = '01007' + + +class PrivilegeNotRevoked(PostgresWarning): + sqlstate = '01006' + + +class StringDataRightTruncation(PostgresWarning): + sqlstate = '01004' + + +class DeprecatedFeature(PostgresWarning): + sqlstate = '01P01' + + +class NoData(PostgresWarning): + sqlstate = '02000' + + +class NoAdditionalDynamicResultSetsReturned(NoData): + sqlstate = '02001' + + +class SQLStatementNotYetCompleteError(_base.PostgresError): + sqlstate = '03000' + + +class PostgresConnectionError(_base.PostgresError): + sqlstate = '08000' + + +class ConnectionDoesNotExistError(PostgresConnectionError): + sqlstate = '08003' + + +class ConnectionFailureError(PostgresConnectionError): + sqlstate = '08006' + + +class ClientCannotConnectError(PostgresConnectionError): + sqlstate = '08001' + + +class ConnectionRejectionError(PostgresConnectionError): + sqlstate = '08004' + + +class TransactionResolutionUnknownError(PostgresConnectionError): + sqlstate = '08007' + + +class ProtocolViolationError(PostgresConnectionError): + sqlstate = '08P01' + + +class TriggeredActionError(_base.PostgresError): + sqlstate = '09000' + + +class FeatureNotSupportedError(_base.PostgresError): + sqlstate = '0A000' + + +class InvalidCachedStatementError(FeatureNotSupportedError): + pass + + +class InvalidTransactionInitiationError(_base.PostgresError): + sqlstate = '0B000' + + +class LocatorError(_base.PostgresError): + sqlstate = '0F000' + + +class InvalidLocatorSpecificationError(LocatorError): + sqlstate = '0F001' + + +class InvalidGrantorError(_base.PostgresError): + sqlstate = '0L000' + + +class InvalidGrantOperationError(InvalidGrantorError): + sqlstate = '0LP01' + + +class InvalidRoleSpecificationError(_base.PostgresError): + sqlstate = '0P000' + + +class DiagnosticsError(_base.PostgresError): + sqlstate = '0Z000' + + +class StackedDiagnosticsAccessedWithoutActiveHandlerError(DiagnosticsError): + sqlstate = '0Z002' + + +class CaseNotFoundError(_base.PostgresError): + sqlstate = '20000' + + +class CardinalityViolationError(_base.PostgresError): + sqlstate = '21000' + + +class DataError(_base.PostgresError): + sqlstate = '22000' + + +class ArraySubscriptError(DataError): + sqlstate = '2202E' + + +class CharacterNotInRepertoireError(DataError): + sqlstate = '22021' + + +class DatetimeFieldOverflowError(DataError): + sqlstate = '22008' + + +class DivisionByZeroError(DataError): + sqlstate = '22012' + + +class ErrorInAssignmentError(DataError): + sqlstate = '22005' + + +class EscapeCharacterConflictError(DataError): + sqlstate = '2200B' + + +class IndicatorOverflowError(DataError): + sqlstate = '22022' + + +class IntervalFieldOverflowError(DataError): + sqlstate = '22015' + + +class InvalidArgumentForLogarithmError(DataError): + sqlstate = '2201E' + + +class InvalidArgumentForNtileFunctionError(DataError): + sqlstate = '22014' + + +class InvalidArgumentForNthValueFunctionError(DataError): + sqlstate = '22016' + + +class InvalidArgumentForPowerFunctionError(DataError): + sqlstate = '2201F' + + +class InvalidArgumentForWidthBucketFunctionError(DataError): + sqlstate = '2201G' + + +class InvalidCharacterValueForCastError(DataError): + sqlstate = '22018' + + 
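+
+# Editorial usage sketch (not part of the generated file): these classes
+# map SQLSTATE codes onto an exception hierarchy, so callers can catch a
+# specific error or its whole class, e.g.:
+#
+#     try:
+#         await con.execute("INSERT INTO users (id) VALUES (1)")
+#     except asyncpg.UniqueViolationError:                # SQLSTATE 23505
+#         pass                                            # duplicate key
+#     except asyncpg.IntegrityConstraintViolationError:   # class 23000
+#         raise
+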
+class InvalidDatetimeFormatError(DataError): + sqlstate = '22007' + + +class InvalidEscapeCharacterError(DataError): + sqlstate = '22019' + + +class InvalidEscapeOctetError(DataError): + sqlstate = '2200D' + + +class InvalidEscapeSequenceError(DataError): + sqlstate = '22025' + + +class NonstandardUseOfEscapeCharacterError(DataError): + sqlstate = '22P06' + + +class InvalidIndicatorParameterValueError(DataError): + sqlstate = '22010' + + +class InvalidParameterValueError(DataError): + sqlstate = '22023' + + +class InvalidPrecedingOrFollowingSizeError(DataError): + sqlstate = '22013' + + +class InvalidRegularExpressionError(DataError): + sqlstate = '2201B' + + +class InvalidRowCountInLimitClauseError(DataError): + sqlstate = '2201W' + + +class InvalidRowCountInResultOffsetClauseError(DataError): + sqlstate = '2201X' + + +class InvalidTablesampleArgumentError(DataError): + sqlstate = '2202H' + + +class InvalidTablesampleRepeatError(DataError): + sqlstate = '2202G' + + +class InvalidTimeZoneDisplacementValueError(DataError): + sqlstate = '22009' + + +class InvalidUseOfEscapeCharacterError(DataError): + sqlstate = '2200C' + + +class MostSpecificTypeMismatchError(DataError): + sqlstate = '2200G' + + +class NullValueNotAllowedError(DataError): + sqlstate = '22004' + + +class NullValueNoIndicatorParameterError(DataError): + sqlstate = '22002' + + +class NumericValueOutOfRangeError(DataError): + sqlstate = '22003' + + +class SequenceGeneratorLimitExceededError(DataError): + sqlstate = '2200H' + + +class StringDataLengthMismatchError(DataError): + sqlstate = '22026' + + +class StringDataRightTruncationError(DataError): + sqlstate = '22001' + + +class SubstringError(DataError): + sqlstate = '22011' + + +class TrimError(DataError): + sqlstate = '22027' + + +class UnterminatedCStringError(DataError): + sqlstate = '22024' + + +class ZeroLengthCharacterStringError(DataError): + sqlstate = '2200F' + + +class PostgresFloatingPointError(DataError): + sqlstate = '22P01' + + +class InvalidTextRepresentationError(DataError): + sqlstate = '22P02' + + +class InvalidBinaryRepresentationError(DataError): + sqlstate = '22P03' + + +class BadCopyFileFormatError(DataError): + sqlstate = '22P04' + + +class UntranslatableCharacterError(DataError): + sqlstate = '22P05' + + +class NotAnXmlDocumentError(DataError): + sqlstate = '2200L' + + +class InvalidXmlDocumentError(DataError): + sqlstate = '2200M' + + +class InvalidXmlContentError(DataError): + sqlstate = '2200N' + + +class InvalidXmlCommentError(DataError): + sqlstate = '2200S' + + +class InvalidXmlProcessingInstructionError(DataError): + sqlstate = '2200T' + + +class DuplicateJsonObjectKeyValueError(DataError): + sqlstate = '22030' + + +class InvalidArgumentForSQLJsonDatetimeFunctionError(DataError): + sqlstate = '22031' + + +class InvalidJsonTextError(DataError): + sqlstate = '22032' + + +class InvalidSQLJsonSubscriptError(DataError): + sqlstate = '22033' + + +class MoreThanOneSQLJsonItemError(DataError): + sqlstate = '22034' + + +class NoSQLJsonItemError(DataError): + sqlstate = '22035' + + +class NonNumericSQLJsonItemError(DataError): + sqlstate = '22036' + + +class NonUniqueKeysInAJsonObjectError(DataError): + sqlstate = '22037' + + +class SingletonSQLJsonItemRequiredError(DataError): + sqlstate = '22038' + + +class SQLJsonArrayNotFoundError(DataError): + sqlstate = '22039' + + +class SQLJsonMemberNotFoundError(DataError): + sqlstate = '2203A' + + +class SQLJsonNumberNotFoundError(DataError): + sqlstate = '2203B' + + +class 
SQLJsonObjectNotFoundError(DataError): + sqlstate = '2203C' + + +class TooManyJsonArrayElementsError(DataError): + sqlstate = '2203D' + + +class TooManyJsonObjectMembersError(DataError): + sqlstate = '2203E' + + +class SQLJsonScalarRequiredError(DataError): + sqlstate = '2203F' + + +class IntegrityConstraintViolationError(_base.PostgresError): + sqlstate = '23000' + + +class RestrictViolationError(IntegrityConstraintViolationError): + sqlstate = '23001' + + +class NotNullViolationError(IntegrityConstraintViolationError): + sqlstate = '23502' + + +class ForeignKeyViolationError(IntegrityConstraintViolationError): + sqlstate = '23503' + + +class UniqueViolationError(IntegrityConstraintViolationError): + sqlstate = '23505' + + +class CheckViolationError(IntegrityConstraintViolationError): + sqlstate = '23514' + + +class ExclusionViolationError(IntegrityConstraintViolationError): + sqlstate = '23P01' + + +class InvalidCursorStateError(_base.PostgresError): + sqlstate = '24000' + + +class InvalidTransactionStateError(_base.PostgresError): + sqlstate = '25000' + + +class ActiveSQLTransactionError(InvalidTransactionStateError): + sqlstate = '25001' + + +class BranchTransactionAlreadyActiveError(InvalidTransactionStateError): + sqlstate = '25002' + + +class HeldCursorRequiresSameIsolationLevelError(InvalidTransactionStateError): + sqlstate = '25008' + + +class InappropriateAccessModeForBranchTransactionError( + InvalidTransactionStateError): + sqlstate = '25003' + + +class InappropriateIsolationLevelForBranchTransactionError( + InvalidTransactionStateError): + sqlstate = '25004' + + +class NoActiveSQLTransactionForBranchTransactionError( + InvalidTransactionStateError): + sqlstate = '25005' + + +class ReadOnlySQLTransactionError(InvalidTransactionStateError): + sqlstate = '25006' + + +class SchemaAndDataStatementMixingNotSupportedError( + InvalidTransactionStateError): + sqlstate = '25007' + + +class NoActiveSQLTransactionError(InvalidTransactionStateError): + sqlstate = '25P01' + + +class InFailedSQLTransactionError(InvalidTransactionStateError): + sqlstate = '25P02' + + +class IdleInTransactionSessionTimeoutError(InvalidTransactionStateError): + sqlstate = '25P03' + + +class InvalidSQLStatementNameError(_base.PostgresError): + sqlstate = '26000' + + +class TriggeredDataChangeViolationError(_base.PostgresError): + sqlstate = '27000' + + +class InvalidAuthorizationSpecificationError(_base.PostgresError): + sqlstate = '28000' + + +class InvalidPasswordError(InvalidAuthorizationSpecificationError): + sqlstate = '28P01' + + +class DependentPrivilegeDescriptorsStillExistError(_base.PostgresError): + sqlstate = '2B000' + + +class DependentObjectsStillExistError( + DependentPrivilegeDescriptorsStillExistError): + sqlstate = '2BP01' + + +class InvalidTransactionTerminationError(_base.PostgresError): + sqlstate = '2D000' + + +class SQLRoutineError(_base.PostgresError): + sqlstate = '2F000' + + +class FunctionExecutedNoReturnStatementError(SQLRoutineError): + sqlstate = '2F005' + + +class ModifyingSQLDataNotPermittedError(SQLRoutineError): + sqlstate = '2F002' + + +class ProhibitedSQLStatementAttemptedError(SQLRoutineError): + sqlstate = '2F003' + + +class ReadingSQLDataNotPermittedError(SQLRoutineError): + sqlstate = '2F004' + + +class InvalidCursorNameError(_base.PostgresError): + sqlstate = '34000' + + +class ExternalRoutineError(_base.PostgresError): + sqlstate = '38000' + + +class ContainingSQLNotPermittedError(ExternalRoutineError): + sqlstate = '38001' + + +class 
ModifyingExternalRoutineSQLDataNotPermittedError(ExternalRoutineError): + sqlstate = '38002' + + +class ProhibitedExternalRoutineSQLStatementAttemptedError( + ExternalRoutineError): + sqlstate = '38003' + + +class ReadingExternalRoutineSQLDataNotPermittedError(ExternalRoutineError): + sqlstate = '38004' + + +class ExternalRoutineInvocationError(_base.PostgresError): + sqlstate = '39000' + + +class InvalidSqlstateReturnedError(ExternalRoutineInvocationError): + sqlstate = '39001' + + +class NullValueInExternalRoutineNotAllowedError( + ExternalRoutineInvocationError): + sqlstate = '39004' + + +class TriggerProtocolViolatedError(ExternalRoutineInvocationError): + sqlstate = '39P01' + + +class SrfProtocolViolatedError(ExternalRoutineInvocationError): + sqlstate = '39P02' + + +class EventTriggerProtocolViolatedError(ExternalRoutineInvocationError): + sqlstate = '39P03' + + +class SavepointError(_base.PostgresError): + sqlstate = '3B000' + + +class InvalidSavepointSpecificationError(SavepointError): + sqlstate = '3B001' + + +class InvalidCatalogNameError(_base.PostgresError): + sqlstate = '3D000' + + +class InvalidSchemaNameError(_base.PostgresError): + sqlstate = '3F000' + + +class TransactionRollbackError(_base.PostgresError): + sqlstate = '40000' + + +class TransactionIntegrityConstraintViolationError(TransactionRollbackError): + sqlstate = '40002' + + +class SerializationError(TransactionRollbackError): + sqlstate = '40001' + + +class StatementCompletionUnknownError(TransactionRollbackError): + sqlstate = '40003' + + +class DeadlockDetectedError(TransactionRollbackError): + sqlstate = '40P01' + + +class SyntaxOrAccessError(_base.PostgresError): + sqlstate = '42000' + + +class PostgresSyntaxError(SyntaxOrAccessError): + sqlstate = '42601' + + +class InsufficientPrivilegeError(SyntaxOrAccessError): + sqlstate = '42501' + + +class CannotCoerceError(SyntaxOrAccessError): + sqlstate = '42846' + + +class GroupingError(SyntaxOrAccessError): + sqlstate = '42803' + + +class WindowingError(SyntaxOrAccessError): + sqlstate = '42P20' + + +class InvalidRecursionError(SyntaxOrAccessError): + sqlstate = '42P19' + + +class InvalidForeignKeyError(SyntaxOrAccessError): + sqlstate = '42830' + + +class InvalidNameError(SyntaxOrAccessError): + sqlstate = '42602' + + +class NameTooLongError(SyntaxOrAccessError): + sqlstate = '42622' + + +class ReservedNameError(SyntaxOrAccessError): + sqlstate = '42939' + + +class DatatypeMismatchError(SyntaxOrAccessError): + sqlstate = '42804' + + +class IndeterminateDatatypeError(SyntaxOrAccessError): + sqlstate = '42P18' + + +class CollationMismatchError(SyntaxOrAccessError): + sqlstate = '42P21' + + +class IndeterminateCollationError(SyntaxOrAccessError): + sqlstate = '42P22' + + +class WrongObjectTypeError(SyntaxOrAccessError): + sqlstate = '42809' + + +class GeneratedAlwaysError(SyntaxOrAccessError): + sqlstate = '428C9' + + +class UndefinedColumnError(SyntaxOrAccessError): + sqlstate = '42703' + + +class UndefinedFunctionError(SyntaxOrAccessError): + sqlstate = '42883' + + +class UndefinedTableError(SyntaxOrAccessError): + sqlstate = '42P01' + + +class UndefinedParameterError(SyntaxOrAccessError): + sqlstate = '42P02' + + +class UndefinedObjectError(SyntaxOrAccessError): + sqlstate = '42704' + + +class DuplicateColumnError(SyntaxOrAccessError): + sqlstate = '42701' + + +class DuplicateCursorError(SyntaxOrAccessError): + sqlstate = '42P03' + + +class DuplicateDatabaseError(SyntaxOrAccessError): + sqlstate = '42P04' + + +class DuplicateFunctionError(SyntaxOrAccessError): + 
sqlstate = '42723' + + +class DuplicatePreparedStatementError(SyntaxOrAccessError): + sqlstate = '42P05' + + +class DuplicateSchemaError(SyntaxOrAccessError): + sqlstate = '42P06' + + +class DuplicateTableError(SyntaxOrAccessError): + sqlstate = '42P07' + + +class DuplicateAliasError(SyntaxOrAccessError): + sqlstate = '42712' + + +class DuplicateObjectError(SyntaxOrAccessError): + sqlstate = '42710' + + +class AmbiguousColumnError(SyntaxOrAccessError): + sqlstate = '42702' + + +class AmbiguousFunctionError(SyntaxOrAccessError): + sqlstate = '42725' + + +class AmbiguousParameterError(SyntaxOrAccessError): + sqlstate = '42P08' + + +class AmbiguousAliasError(SyntaxOrAccessError): + sqlstate = '42P09' + + +class InvalidColumnReferenceError(SyntaxOrAccessError): + sqlstate = '42P10' + + +class InvalidColumnDefinitionError(SyntaxOrAccessError): + sqlstate = '42611' + + +class InvalidCursorDefinitionError(SyntaxOrAccessError): + sqlstate = '42P11' + + +class InvalidDatabaseDefinitionError(SyntaxOrAccessError): + sqlstate = '42P12' + + +class InvalidFunctionDefinitionError(SyntaxOrAccessError): + sqlstate = '42P13' + + +class InvalidPreparedStatementDefinitionError(SyntaxOrAccessError): + sqlstate = '42P14' + + +class InvalidSchemaDefinitionError(SyntaxOrAccessError): + sqlstate = '42P15' + + +class InvalidTableDefinitionError(SyntaxOrAccessError): + sqlstate = '42P16' + + +class InvalidObjectDefinitionError(SyntaxOrAccessError): + sqlstate = '42P17' + + +class WithCheckOptionViolationError(_base.PostgresError): + sqlstate = '44000' + + +class InsufficientResourcesError(_base.PostgresError): + sqlstate = '53000' + + +class DiskFullError(InsufficientResourcesError): + sqlstate = '53100' + + +class OutOfMemoryError(InsufficientResourcesError): + sqlstate = '53200' + + +class TooManyConnectionsError(InsufficientResourcesError): + sqlstate = '53300' + + +class ConfigurationLimitExceededError(InsufficientResourcesError): + sqlstate = '53400' + + +class ProgramLimitExceededError(_base.PostgresError): + sqlstate = '54000' + + +class StatementTooComplexError(ProgramLimitExceededError): + sqlstate = '54001' + + +class TooManyColumnsError(ProgramLimitExceededError): + sqlstate = '54011' + + +class TooManyArgumentsError(ProgramLimitExceededError): + sqlstate = '54023' + + +class ObjectNotInPrerequisiteStateError(_base.PostgresError): + sqlstate = '55000' + + +class ObjectInUseError(ObjectNotInPrerequisiteStateError): + sqlstate = '55006' + + +class CantChangeRuntimeParamError(ObjectNotInPrerequisiteStateError): + sqlstate = '55P02' + + +class LockNotAvailableError(ObjectNotInPrerequisiteStateError): + sqlstate = '55P03' + + +class UnsafeNewEnumValueUsageError(ObjectNotInPrerequisiteStateError): + sqlstate = '55P04' + + +class OperatorInterventionError(_base.PostgresError): + sqlstate = '57000' + + +class QueryCanceledError(OperatorInterventionError): + sqlstate = '57014' + + +class AdminShutdownError(OperatorInterventionError): + sqlstate = '57P01' + + +class CrashShutdownError(OperatorInterventionError): + sqlstate = '57P02' + + +class CannotConnectNowError(OperatorInterventionError): + sqlstate = '57P03' + + +class DatabaseDroppedError(OperatorInterventionError): + sqlstate = '57P04' + + +class IdleSessionTimeoutError(OperatorInterventionError): + sqlstate = '57P05' + + +class PostgresSystemError(_base.PostgresError): + sqlstate = '58000' + + +class PostgresIOError(PostgresSystemError): + sqlstate = '58030' + + +class UndefinedFileError(PostgresSystemError): + sqlstate = '58P01' + + +class 
DuplicateFileError(PostgresSystemError): + sqlstate = '58P02' + + +class SnapshotTooOldError(_base.PostgresError): + sqlstate = '72000' + + +class ConfigFileError(_base.PostgresError): + sqlstate = 'F0000' + + +class LockFileExistsError(ConfigFileError): + sqlstate = 'F0001' + + +class FDWError(_base.PostgresError): + sqlstate = 'HV000' + + +class FDWColumnNameNotFoundError(FDWError): + sqlstate = 'HV005' + + +class FDWDynamicParameterValueNeededError(FDWError): + sqlstate = 'HV002' + + +class FDWFunctionSequenceError(FDWError): + sqlstate = 'HV010' + + +class FDWInconsistentDescriptorInformationError(FDWError): + sqlstate = 'HV021' + + +class FDWInvalidAttributeValueError(FDWError): + sqlstate = 'HV024' + + +class FDWInvalidColumnNameError(FDWError): + sqlstate = 'HV007' + + +class FDWInvalidColumnNumberError(FDWError): + sqlstate = 'HV008' + + +class FDWInvalidDataTypeError(FDWError): + sqlstate = 'HV004' + + +class FDWInvalidDataTypeDescriptorsError(FDWError): + sqlstate = 'HV006' + + +class FDWInvalidDescriptorFieldIdentifierError(FDWError): + sqlstate = 'HV091' + + +class FDWInvalidHandleError(FDWError): + sqlstate = 'HV00B' + + +class FDWInvalidOptionIndexError(FDWError): + sqlstate = 'HV00C' + + +class FDWInvalidOptionNameError(FDWError): + sqlstate = 'HV00D' + + +class FDWInvalidStringLengthOrBufferLengthError(FDWError): + sqlstate = 'HV090' + + +class FDWInvalidStringFormatError(FDWError): + sqlstate = 'HV00A' + + +class FDWInvalidUseOfNullPointerError(FDWError): + sqlstate = 'HV009' + + +class FDWTooManyHandlesError(FDWError): + sqlstate = 'HV014' + + +class FDWOutOfMemoryError(FDWError): + sqlstate = 'HV001' + + +class FDWNoSchemasError(FDWError): + sqlstate = 'HV00P' + + +class FDWOptionNameNotFoundError(FDWError): + sqlstate = 'HV00J' + + +class FDWReplyHandleError(FDWError): + sqlstate = 'HV00K' + + +class FDWSchemaNotFoundError(FDWError): + sqlstate = 'HV00Q' + + +class FDWTableNotFoundError(FDWError): + sqlstate = 'HV00R' + + +class FDWUnableToCreateExecutionError(FDWError): + sqlstate = 'HV00L' + + +class FDWUnableToCreateReplyError(FDWError): + sqlstate = 'HV00M' + + +class FDWUnableToEstablishConnectionError(FDWError): + sqlstate = 'HV00N' + + +class PLPGSQLError(_base.PostgresError): + sqlstate = 'P0000' + + +class RaiseError(PLPGSQLError): + sqlstate = 'P0001' + + +class NoDataFoundError(PLPGSQLError): + sqlstate = 'P0002' + + +class TooManyRowsError(PLPGSQLError): + sqlstate = 'P0003' + + +class AssertError(PLPGSQLError): + sqlstate = 'P0004' + + +class InternalServerError(_base.PostgresError): + sqlstate = 'XX000' + + +class DataCorruptedError(InternalServerError): + sqlstate = 'XX001' + + +class IndexCorruptedError(InternalServerError): + sqlstate = 'XX002' + + +__all__ = ( + 'ActiveSQLTransactionError', 'AdminShutdownError', + 'AmbiguousAliasError', 'AmbiguousColumnError', + 'AmbiguousFunctionError', 'AmbiguousParameterError', + 'ArraySubscriptError', 'AssertError', 'BadCopyFileFormatError', + 'BranchTransactionAlreadyActiveError', 'CannotCoerceError', + 'CannotConnectNowError', 'CantChangeRuntimeParamError', + 'CardinalityViolationError', 'CaseNotFoundError', + 'CharacterNotInRepertoireError', 'CheckViolationError', + 'ClientCannotConnectError', 'CollationMismatchError', + 'ConfigFileError', 'ConfigurationLimitExceededError', + 'ConnectionDoesNotExistError', 'ConnectionFailureError', + 'ConnectionRejectionError', 'ContainingSQLNotPermittedError', + 'CrashShutdownError', 'DataCorruptedError', 'DataError', + 'DatabaseDroppedError', 'DatatypeMismatchError', + 
'DatetimeFieldOverflowError', 'DeadlockDetectedError', + 'DependentObjectsStillExistError', + 'DependentPrivilegeDescriptorsStillExistError', 'DeprecatedFeature', + 'DiagnosticsError', 'DiskFullError', 'DivisionByZeroError', + 'DuplicateAliasError', 'DuplicateColumnError', 'DuplicateCursorError', + 'DuplicateDatabaseError', 'DuplicateFileError', + 'DuplicateFunctionError', 'DuplicateJsonObjectKeyValueError', + 'DuplicateObjectError', 'DuplicatePreparedStatementError', + 'DuplicateSchemaError', 'DuplicateTableError', + 'DynamicResultSetsReturned', 'ErrorInAssignmentError', + 'EscapeCharacterConflictError', 'EventTriggerProtocolViolatedError', + 'ExclusionViolationError', 'ExternalRoutineError', + 'ExternalRoutineInvocationError', 'FDWColumnNameNotFoundError', + 'FDWDynamicParameterValueNeededError', 'FDWError', + 'FDWFunctionSequenceError', + 'FDWInconsistentDescriptorInformationError', + 'FDWInvalidAttributeValueError', 'FDWInvalidColumnNameError', + 'FDWInvalidColumnNumberError', 'FDWInvalidDataTypeDescriptorsError', + 'FDWInvalidDataTypeError', 'FDWInvalidDescriptorFieldIdentifierError', + 'FDWInvalidHandleError', 'FDWInvalidOptionIndexError', + 'FDWInvalidOptionNameError', 'FDWInvalidStringFormatError', + 'FDWInvalidStringLengthOrBufferLengthError', + 'FDWInvalidUseOfNullPointerError', 'FDWNoSchemasError', + 'FDWOptionNameNotFoundError', 'FDWOutOfMemoryError', + 'FDWReplyHandleError', 'FDWSchemaNotFoundError', + 'FDWTableNotFoundError', 'FDWTooManyHandlesError', + 'FDWUnableToCreateExecutionError', 'FDWUnableToCreateReplyError', + 'FDWUnableToEstablishConnectionError', 'FeatureNotSupportedError', + 'ForeignKeyViolationError', 'FunctionExecutedNoReturnStatementError', + 'GeneratedAlwaysError', 'GroupingError', + 'HeldCursorRequiresSameIsolationLevelError', + 'IdleInTransactionSessionTimeoutError', 'IdleSessionTimeoutError', + 'ImplicitZeroBitPadding', 'InFailedSQLTransactionError', + 'InappropriateAccessModeForBranchTransactionError', + 'InappropriateIsolationLevelForBranchTransactionError', + 'IndeterminateCollationError', 'IndeterminateDatatypeError', + 'IndexCorruptedError', 'IndicatorOverflowError', + 'InsufficientPrivilegeError', 'InsufficientResourcesError', + 'IntegrityConstraintViolationError', 'InternalServerError', + 'IntervalFieldOverflowError', 'InvalidArgumentForLogarithmError', + 'InvalidArgumentForNthValueFunctionError', + 'InvalidArgumentForNtileFunctionError', + 'InvalidArgumentForPowerFunctionError', + 'InvalidArgumentForSQLJsonDatetimeFunctionError', + 'InvalidArgumentForWidthBucketFunctionError', + 'InvalidAuthorizationSpecificationError', + 'InvalidBinaryRepresentationError', 'InvalidCachedStatementError', + 'InvalidCatalogNameError', 'InvalidCharacterValueForCastError', + 'InvalidColumnDefinitionError', 'InvalidColumnReferenceError', + 'InvalidCursorDefinitionError', 'InvalidCursorNameError', + 'InvalidCursorStateError', 'InvalidDatabaseDefinitionError', + 'InvalidDatetimeFormatError', 'InvalidEscapeCharacterError', + 'InvalidEscapeOctetError', 'InvalidEscapeSequenceError', + 'InvalidForeignKeyError', 'InvalidFunctionDefinitionError', + 'InvalidGrantOperationError', 'InvalidGrantorError', + 'InvalidIndicatorParameterValueError', 'InvalidJsonTextError', + 'InvalidLocatorSpecificationError', 'InvalidNameError', + 'InvalidObjectDefinitionError', 'InvalidParameterValueError', + 'InvalidPasswordError', 'InvalidPrecedingOrFollowingSizeError', + 'InvalidPreparedStatementDefinitionError', 'InvalidRecursionError', + 'InvalidRegularExpressionError', 
'InvalidRoleSpecificationError', + 'InvalidRowCountInLimitClauseError', + 'InvalidRowCountInResultOffsetClauseError', + 'InvalidSQLJsonSubscriptError', 'InvalidSQLStatementNameError', + 'InvalidSavepointSpecificationError', 'InvalidSchemaDefinitionError', + 'InvalidSchemaNameError', 'InvalidSqlstateReturnedError', + 'InvalidTableDefinitionError', 'InvalidTablesampleArgumentError', + 'InvalidTablesampleRepeatError', 'InvalidTextRepresentationError', + 'InvalidTimeZoneDisplacementValueError', + 'InvalidTransactionInitiationError', 'InvalidTransactionStateError', + 'InvalidTransactionTerminationError', + 'InvalidUseOfEscapeCharacterError', 'InvalidXmlCommentError', + 'InvalidXmlContentError', 'InvalidXmlDocumentError', + 'InvalidXmlProcessingInstructionError', 'LocatorError', + 'LockFileExistsError', 'LockNotAvailableError', + 'ModifyingExternalRoutineSQLDataNotPermittedError', + 'ModifyingSQLDataNotPermittedError', 'MoreThanOneSQLJsonItemError', + 'MostSpecificTypeMismatchError', 'NameTooLongError', + 'NoActiveSQLTransactionError', + 'NoActiveSQLTransactionForBranchTransactionError', + 'NoAdditionalDynamicResultSetsReturned', 'NoData', 'NoDataFoundError', + 'NoSQLJsonItemError', 'NonNumericSQLJsonItemError', + 'NonUniqueKeysInAJsonObjectError', + 'NonstandardUseOfEscapeCharacterError', 'NotAnXmlDocumentError', + 'NotNullViolationError', 'NullValueEliminatedInSetFunction', + 'NullValueInExternalRoutineNotAllowedError', + 'NullValueNoIndicatorParameterError', 'NullValueNotAllowedError', + 'NumericValueOutOfRangeError', 'ObjectInUseError', + 'ObjectNotInPrerequisiteStateError', 'OperatorInterventionError', + 'OutOfMemoryError', 'PLPGSQLError', 'PostgresConnectionError', + 'PostgresFloatingPointError', 'PostgresIOError', + 'PostgresSyntaxError', 'PostgresSystemError', 'PostgresWarning', + 'PrivilegeNotGranted', 'PrivilegeNotRevoked', + 'ProgramLimitExceededError', + 'ProhibitedExternalRoutineSQLStatementAttemptedError', + 'ProhibitedSQLStatementAttemptedError', 'ProtocolViolationError', + 'QueryCanceledError', 'RaiseError', 'ReadOnlySQLTransactionError', + 'ReadingExternalRoutineSQLDataNotPermittedError', + 'ReadingSQLDataNotPermittedError', 'ReservedNameError', + 'RestrictViolationError', 'SQLJsonArrayNotFoundError', + 'SQLJsonMemberNotFoundError', 'SQLJsonNumberNotFoundError', + 'SQLJsonObjectNotFoundError', 'SQLJsonScalarRequiredError', + 'SQLRoutineError', 'SQLStatementNotYetCompleteError', + 'SavepointError', 'SchemaAndDataStatementMixingNotSupportedError', + 'SequenceGeneratorLimitExceededError', 'SerializationError', + 'SingletonSQLJsonItemRequiredError', 'SnapshotTooOldError', + 'SrfProtocolViolatedError', + 'StackedDiagnosticsAccessedWithoutActiveHandlerError', + 'StatementCompletionUnknownError', 'StatementTooComplexError', + 'StringDataLengthMismatchError', 'StringDataRightTruncation', + 'StringDataRightTruncationError', 'SubstringError', + 'SyntaxOrAccessError', 'TooManyArgumentsError', 'TooManyColumnsError', + 'TooManyConnectionsError', 'TooManyJsonArrayElementsError', + 'TooManyJsonObjectMembersError', 'TooManyRowsError', + 'TransactionIntegrityConstraintViolationError', + 'TransactionResolutionUnknownError', 'TransactionRollbackError', + 'TriggerProtocolViolatedError', 'TriggeredActionError', + 'TriggeredDataChangeViolationError', 'TrimError', + 'UndefinedColumnError', 'UndefinedFileError', + 'UndefinedFunctionError', 'UndefinedObjectError', + 'UndefinedParameterError', 'UndefinedTableError', + 'UniqueViolationError', 'UnsafeNewEnumValueUsageError', + 'UnterminatedCStringError', 
'UntranslatableCharacterError', + 'WindowingError', 'WithCheckOptionViolationError', + 'WrongObjectTypeError', 'ZeroLengthCharacterStringError' +) + +__all__ += _base.__all__ diff --git a/sbsheriff/Lib/site-packages/asyncpg/exceptions/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/exceptions/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..97e71ab Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/exceptions/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/exceptions/__pycache__/_base.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/exceptions/__pycache__/_base.cpython-310.pyc new file mode 100644 index 0000000..40d5c7a Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/exceptions/__pycache__/_base.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/exceptions/_base.py b/sbsheriff/Lib/site-packages/asyncpg/exceptions/_base.py new file mode 100644 index 0000000..783b5eb --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/exceptions/_base.py @@ -0,0 +1,285 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncpg +import sys +import textwrap + + +__all__ = ('PostgresError', 'FatalPostgresError', 'UnknownPostgresError', + 'InterfaceError', 'InterfaceWarning', 'PostgresLogMessage', + 'InternalClientError', 'OutdatedSchemaCacheError', 'ProtocolError', + 'UnsupportedClientFeatureError') + + +def _is_asyncpg_class(cls): + modname = cls.__module__ + return modname == 'asyncpg' or modname.startswith('asyncpg.') + + +class PostgresMessageMeta(type): + + _message_map = {} + _field_map = { + 'S': 'severity', + 'V': 'severity_en', + 'C': 'sqlstate', + 'M': 'message', + 'D': 'detail', + 'H': 'hint', + 'P': 'position', + 'p': 'internal_position', + 'q': 'internal_query', + 'W': 'context', + 's': 'schema_name', + 't': 'table_name', + 'c': 'column_name', + 'd': 'data_type_name', + 'n': 'constraint_name', + 'F': 'server_source_filename', + 'L': 'server_source_line', + 'R': 'server_source_function' + } + + def __new__(mcls, name, bases, dct): + cls = super().__new__(mcls, name, bases, dct) + if cls.__module__ == mcls.__module__ and name == 'PostgresMessage': + for f in mcls._field_map.values(): + setattr(cls, f, None) + + if _is_asyncpg_class(cls): + mod = sys.modules[cls.__module__] + if hasattr(mod, name): + raise RuntimeError('exception class redefinition: {}'.format( + name)) + + code = dct.get('sqlstate') + if code is not None: + existing = mcls._message_map.get(code) + if existing is not None: + raise TypeError('{} has duplicate SQLSTATE code, which is' + 'already defined by {}'.format( + name, existing.__name__)) + mcls._message_map[code] = cls + + return cls + + @classmethod + def get_message_class_for_sqlstate(mcls, code): + return mcls._message_map.get(code, UnknownPostgresError) + + +class PostgresMessage(metaclass=PostgresMessageMeta): + + @classmethod + def _get_error_class(cls, fields): + sqlstate = fields.get('C') + return type(cls).get_message_class_for_sqlstate(sqlstate) + + @classmethod + def _get_error_dict(cls, fields, query): + dct = { + 'query': query + } + + field_map = type(cls)._field_map + for k, v in fields.items(): + field = field_map.get(k) + if field: + dct[field] = v + + return dct + + @classmethod + def _make_constructor(cls, fields, query=None): + dct = 
cls._get_error_dict(fields, query) + + exccls = cls._get_error_class(fields) + message = dct.get('message', '') + + # PostgreSQL will raise an exception when it detects + # that the result type of the query has changed from + # when the statement was prepared. + # + # The original error is somewhat cryptic and unspecific, + # so we raise a custom subclass that is easier to handle + # and identify. + # + # Note that we specifically do not rely on the error + # message, as it is localizable. + is_icse = ( + exccls.__name__ == 'FeatureNotSupportedError' and + _is_asyncpg_class(exccls) and + dct.get('server_source_function') == 'RevalidateCachedQuery' + ) + + if is_icse: + exceptions = sys.modules[exccls.__module__] + exccls = exceptions.InvalidCachedStatementError + message = ('cached statement plan is invalid due to a database ' + 'schema or configuration change') + + is_prepared_stmt_error = ( + exccls.__name__ in ('DuplicatePreparedStatementError', + 'InvalidSQLStatementNameError') and + _is_asyncpg_class(exccls) + ) + + if is_prepared_stmt_error: + hint = dct.get('hint', '') + hint += textwrap.dedent("""\ + + NOTE: pgbouncer with pool_mode set to "transaction" or + "statement" does not support prepared statements properly. + You have two options: + + * if you are using pgbouncer for connection pooling to a + single server, switch to the connection pool functionality + provided by asyncpg, it is a much better option for this + purpose; + + * if you have no option of avoiding the use of pgbouncer, + then you can set statement_cache_size to 0 when creating + the asyncpg connection object. + """) + + dct['hint'] = hint + + return exccls, message, dct + + def as_dict(self): + dct = {} + for f in type(self)._field_map.values(): + val = getattr(self, f) + if val is not None: + dct[f] = val + return dct + + +class PostgresError(PostgresMessage, Exception): + """Base class for all Postgres errors.""" + + def __str__(self): + msg = self.args[0] + if self.detail: + msg += '\nDETAIL: {}'.format(self.detail) + if self.hint: + msg += '\nHINT: {}'.format(self.hint) + + return msg + + @classmethod + def new(cls, fields, query=None): + exccls, message, dct = cls._make_constructor(fields, query) + ex = exccls(message) + ex.__dict__.update(dct) + return ex + + +class FatalPostgresError(PostgresError): + """A fatal error that should result in server disconnection.""" + + +class UnknownPostgresError(FatalPostgresError): + """An error with an unknown SQLSTATE code.""" + + +class InterfaceMessage: + def __init__(self, *, detail=None, hint=None): + self.detail = detail + self.hint = hint + + def __str__(self): + msg = self.args[0] + if self.detail: + msg += '\nDETAIL: {}'.format(self.detail) + if self.hint: + msg += '\nHINT: {}'.format(self.hint) + + return msg + + +class InterfaceError(InterfaceMessage, Exception): + """An error caused by improper use of asyncpg API.""" + + def __init__(self, msg, *, detail=None, hint=None): + InterfaceMessage.__init__(self, detail=detail, hint=hint) + Exception.__init__(self, msg) + + def with_msg(self, msg): + return type(self)( + msg, + detail=self.detail, + hint=self.hint, + ).with_traceback( + self.__traceback__ + ) + + +class DataError(InterfaceError, ValueError): + """An error caused by invalid query input.""" + + +class UnsupportedClientFeatureError(InterfaceError): + """Requested feature is unsupported by asyncpg.""" + + +class InterfaceWarning(InterfaceMessage, UserWarning): + """A warning caused by an improper use of asyncpg API.""" + + def __init__(self, msg, *, 
detail=None, hint=None): + InterfaceMessage.__init__(self, detail=detail, hint=hint) + UserWarning.__init__(self, msg) + + +class InternalClientError(Exception): + """All unexpected errors not classified otherwise.""" + + +class ProtocolError(InternalClientError): + """Unexpected condition in the handling of PostgreSQL protocol input.""" + + +class OutdatedSchemaCacheError(InternalClientError): + """A value decoding error caused by a schema change before row fetching.""" + + def __init__(self, msg, *, schema=None, data_type=None, position=None): + super().__init__(msg) + self.schema_name = schema + self.data_type_name = data_type + self.position = position + + +class PostgresLogMessage(PostgresMessage): + """A base class for non-error server messages.""" + + def __str__(self): + return '{}: {}'.format(type(self).__name__, self.message) + + def __setattr__(self, name, val): + raise TypeError('instances of {} are immutable'.format( + type(self).__name__)) + + @classmethod + def new(cls, fields, query=None): + exccls, message_text, dct = cls._make_constructor(fields, query) + + if exccls is UnknownPostgresError: + exccls = PostgresLogMessage + + if exccls is PostgresLogMessage: + severity = dct.get('severity_en') or dct.get('severity') + if severity and severity.upper() == 'WARNING': + exccls = asyncpg.PostgresWarning + + if issubclass(exccls, (BaseException, Warning)): + msg = exccls(message_text) + else: + msg = exccls() + + msg.__dict__.update(dct) + return msg diff --git a/sbsheriff/Lib/site-packages/asyncpg/introspection.py b/sbsheriff/Lib/site-packages/asyncpg/introspection.py new file mode 100644 index 0000000..d62f39a --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/introspection.py @@ -0,0 +1,288 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +_TYPEINFO_13 = '''\ + ( + SELECT + t.oid AS oid, + ns.nspname AS ns, + t.typname AS name, + t.typtype AS kind, + (CASE WHEN t.typtype = 'd' THEN + (WITH RECURSIVE typebases(oid, depth) AS ( + SELECT + t2.typbasetype AS oid, + 0 AS depth + FROM + pg_type t2 + WHERE + t2.oid = t.oid + + UNION ALL + + SELECT + t2.typbasetype AS oid, + tb.depth + 1 AS depth + FROM + pg_type t2, + typebases tb + WHERE + tb.oid = t2.oid + AND t2.typbasetype != 0 + ) SELECT oid FROM typebases ORDER BY depth DESC LIMIT 1) + + ELSE NULL + END) AS basetype, + t.typelem AS elemtype, + elem_t.typdelim AS elemdelim, + range_t.rngsubtype AS range_subtype, + (CASE WHEN t.typtype = 'c' THEN + (SELECT + array_agg(ia.atttypid ORDER BY ia.attnum) + FROM + pg_attribute ia + INNER JOIN pg_class c + ON (ia.attrelid = c.oid) + WHERE + ia.attnum > 0 AND NOT ia.attisdropped + AND c.reltype = t.oid) + + ELSE NULL + END) AS attrtypoids, + (CASE WHEN t.typtype = 'c' THEN + (SELECT + array_agg(ia.attname::text ORDER BY ia.attnum) + FROM + pg_attribute ia + INNER JOIN pg_class c + ON (ia.attrelid = c.oid) + WHERE + ia.attnum > 0 AND NOT ia.attisdropped + AND c.reltype = t.oid) + + ELSE NULL + END) AS attrnames + FROM + pg_catalog.pg_type AS t + INNER JOIN pg_catalog.pg_namespace ns ON ( + ns.oid = t.typnamespace) + LEFT JOIN pg_type elem_t ON ( + t.typlen = -1 AND + t.typelem != 0 AND + t.typelem = elem_t.oid + ) + LEFT JOIN pg_range range_t ON ( + t.oid = range_t.rngtypid + ) + ) +''' + + +INTRO_LOOKUP_TYPES_13 = '''\ +WITH RECURSIVE typeinfo_tree( + oid, ns, name, kind, basetype, elemtype, elemdelim, + range_subtype, 
attrtypoids, attrnames, depth) +AS ( + SELECT + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, + ti.attrtypoids, ti.attrnames, 0 + FROM + {typeinfo} AS ti + WHERE + ti.oid = any($1::oid[]) + + UNION ALL + + SELECT + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, + ti.attrtypoids, ti.attrnames, tt.depth + 1 + FROM + {typeinfo} ti, + typeinfo_tree tt + WHERE + (tt.elemtype IS NOT NULL AND ti.oid = tt.elemtype) + OR (tt.attrtypoids IS NOT NULL AND ti.oid = any(tt.attrtypoids)) + OR (tt.range_subtype IS NOT NULL AND ti.oid = tt.range_subtype) + OR (tt.basetype IS NOT NULL AND ti.oid = tt.basetype) +) + +SELECT DISTINCT + *, + basetype::regtype::text AS basetype_name, + elemtype::regtype::text AS elemtype_name, + range_subtype::regtype::text AS range_subtype_name +FROM + typeinfo_tree +ORDER BY + depth DESC +'''.format(typeinfo=_TYPEINFO_13) + + +_TYPEINFO = '''\ + ( + SELECT + t.oid AS oid, + ns.nspname AS ns, + t.typname AS name, + t.typtype AS kind, + (CASE WHEN t.typtype = 'd' THEN + (WITH RECURSIVE typebases(oid, depth) AS ( + SELECT + t2.typbasetype AS oid, + 0 AS depth + FROM + pg_type t2 + WHERE + t2.oid = t.oid + + UNION ALL + + SELECT + t2.typbasetype AS oid, + tb.depth + 1 AS depth + FROM + pg_type t2, + typebases tb + WHERE + tb.oid = t2.oid + AND t2.typbasetype != 0 + ) SELECT oid FROM typebases ORDER BY depth DESC LIMIT 1) + + ELSE NULL + END) AS basetype, + t.typelem AS elemtype, + elem_t.typdelim AS elemdelim, + COALESCE( + range_t.rngsubtype, + multirange_t.rngsubtype) AS range_subtype, + (CASE WHEN t.typtype = 'c' THEN + (SELECT + array_agg(ia.atttypid ORDER BY ia.attnum) + FROM + pg_attribute ia + INNER JOIN pg_class c + ON (ia.attrelid = c.oid) + WHERE + ia.attnum > 0 AND NOT ia.attisdropped + AND c.reltype = t.oid) + + ELSE NULL + END) AS attrtypoids, + (CASE WHEN t.typtype = 'c' THEN + (SELECT + array_agg(ia.attname::text ORDER BY ia.attnum) + FROM + pg_attribute ia + INNER JOIN pg_class c + ON (ia.attrelid = c.oid) + WHERE + ia.attnum > 0 AND NOT ia.attisdropped + AND c.reltype = t.oid) + + ELSE NULL + END) AS attrnames + FROM + pg_catalog.pg_type AS t + INNER JOIN pg_catalog.pg_namespace ns ON ( + ns.oid = t.typnamespace) + LEFT JOIN pg_type elem_t ON ( + t.typlen = -1 AND + t.typelem != 0 AND + t.typelem = elem_t.oid + ) + LEFT JOIN pg_range range_t ON ( + t.oid = range_t.rngtypid + ) + LEFT JOIN pg_range multirange_t ON ( + t.oid = multirange_t.rngmultitypid + ) + ) +''' + + +INTRO_LOOKUP_TYPES = '''\ +WITH RECURSIVE typeinfo_tree( + oid, ns, name, kind, basetype, elemtype, elemdelim, + range_subtype, attrtypoids, attrnames, depth) +AS ( + SELECT + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, + ti.attrtypoids, ti.attrnames, 0 + FROM + {typeinfo} AS ti + WHERE + ti.oid = any($1::oid[]) + + UNION ALL + + SELECT + ti.oid, ti.ns, ti.name, ti.kind, ti.basetype, + ti.elemtype, ti.elemdelim, ti.range_subtype, + ti.attrtypoids, ti.attrnames, tt.depth + 1 + FROM + {typeinfo} ti, + typeinfo_tree tt + WHERE + (tt.elemtype IS NOT NULL AND ti.oid = tt.elemtype) + OR (tt.attrtypoids IS NOT NULL AND ti.oid = any(tt.attrtypoids)) + OR (tt.range_subtype IS NOT NULL AND ti.oid = tt.range_subtype) + OR (tt.basetype IS NOT NULL AND ti.oid = tt.basetype) +) + +SELECT DISTINCT + *, + basetype::regtype::text AS basetype_name, + elemtype::regtype::text AS elemtype_name, + range_subtype::regtype::text AS range_subtype_name +FROM + typeinfo_tree +ORDER BY + 
depth DESC +'''.format(typeinfo=_TYPEINFO) + + +TYPE_BY_NAME = '''\ +SELECT + t.oid, + t.typelem AS elemtype, + t.typtype AS kind +FROM + pg_catalog.pg_type AS t + INNER JOIN pg_catalog.pg_namespace ns ON (ns.oid = t.typnamespace) +WHERE + t.typname = $1 AND ns.nspname = $2 +''' + + +TYPE_BY_OID = '''\ +SELECT + t.oid, + t.typelem AS elemtype, + t.typtype AS kind +FROM + pg_catalog.pg_type AS t +WHERE + t.oid = $1 +''' + + +# 'b' for a base type, 'd' for a domain, 'e' for enum. +SCALAR_TYPE_KINDS = (b'b', b'd', b'e') + + +def is_scalar_type(typeinfo) -> bool: + return ( + typeinfo['kind'] in SCALAR_TYPE_KINDS and + not typeinfo['elemtype'] + ) + + +def is_domain_type(typeinfo) -> bool: + return typeinfo['kind'] == b'd' diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/__init__.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__init__.pxd new file mode 100644 index 0000000..1df403c --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__init__.pxd @@ -0,0 +1,5 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/__init__.py b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__init__.py new file mode 100644 index 0000000..1df403c --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__init__.py @@ -0,0 +1,5 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..da203d6 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/__pycache__/types.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__pycache__/types.cpython-310.pyc new file mode 100644 index 0000000..a80f59a Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/pgproto/__pycache__/types.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/buffer.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/buffer.pxd new file mode 100644 index 0000000..c2d4c6e --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/buffer.pxd @@ -0,0 +1,136 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef class WriteBuffer: + cdef: + # Preallocated small buffer + bint _smallbuf_inuse + char _smallbuf[_BUFFER_INITIAL_SIZE] + + char *_buf + + # Allocated size + ssize_t _size + + # Length of data in the buffer + ssize_t _length + + # Number of memoryviews attached to the buffer + int _view_count + + # True if start_message was used + bint _message_mode + + cdef inline len(self): + return self._length + + cdef inline write_len_prefixed_utf8(self, str s): + return self.write_len_prefixed_bytes(s.encode('utf-8')) + + cdef inline _check_readonly(self) + cdef inline _ensure_alloced(self, ssize_t extra_length) + cdef _reallocate(self, ssize_t new_size) + cdef inline reset(self) + cdef inline start_message(self, char type) + cdef inline end_message(self) + cdef 
write_buffer(self, WriteBuffer buf) + cdef write_byte(self, char b) + cdef write_bytes(self, bytes data) + cdef write_len_prefixed_buffer(self, WriteBuffer buf) + cdef write_len_prefixed_bytes(self, bytes data) + cdef write_bytestring(self, bytes string) + cdef write_str(self, str string, str encoding) + cdef write_frbuf(self, FRBuffer *buf) + cdef write_cstr(self, const char *data, ssize_t len) + cdef write_int16(self, int16_t i) + cdef write_int32(self, int32_t i) + cdef write_int64(self, int64_t i) + cdef write_float(self, float f) + cdef write_double(self, double d) + + @staticmethod + cdef WriteBuffer new_message(char type) + + @staticmethod + cdef WriteBuffer new() + + +ctypedef const char * (*try_consume_message_method)(object, ssize_t*) +ctypedef int32_t (*take_message_type_method)(object, char) except -1 +ctypedef int32_t (*take_message_method)(object) except -1 +ctypedef char (*get_message_type_method)(object) + + +cdef class ReadBuffer: + cdef: + # A deque of buffers (bytes objects) + object _bufs + object _bufs_append + object _bufs_popleft + + # A pointer to the first buffer in `_bufs` + bytes _buf0 + + # A pointer to the previous first buffer + # (used to prolong the life of _buf0 when using + # methods like _try_read_bytes) + bytes _buf0_prev + + # Number of buffers in `_bufs` + int32_t _bufs_len + + # A read position in the first buffer in `_bufs` + ssize_t _pos0 + + # Length of the first buffer in `_bufs` + ssize_t _len0 + + # A total number of buffered bytes in ReadBuffer + ssize_t _length + + char _current_message_type + int32_t _current_message_len + ssize_t _current_message_len_unread + bint _current_message_ready + + cdef inline len(self): + return self._length + + cdef inline char get_message_type(self): + return self._current_message_type + + cdef inline int32_t get_message_length(self): + return self._current_message_len + + cdef feed_data(self, data) + cdef inline _ensure_first_buf(self) + cdef _switch_to_next_buf(self) + cdef inline char read_byte(self) except? -1 + cdef inline const char* _try_read_bytes(self, ssize_t nbytes) + cdef inline _read_into(self, char *buf, ssize_t nbytes) + cdef inline _read_and_discard(self, ssize_t nbytes) + cdef bytes read_bytes(self, ssize_t nbytes) + cdef bytes read_len_prefixed_bytes(self) + cdef str read_len_prefixed_utf8(self) + cdef read_uuid(self) + cdef inline int64_t read_int64(self) except? -1 + cdef inline int32_t read_int32(self) except? -1 + cdef inline int16_t read_int16(self) except? -1 + cdef inline read_null_str(self) + cdef int32_t take_message(self) except -1 + cdef inline int32_t take_message_type(self, char mtype) except -1 + cdef int32_t put_message(self) except -1 + cdef inline const char* try_consume_message(self, ssize_t* len) + cdef bytes consume_message(self) + cdef discard_message(self) + cdef redirect_messages(self, WriteBuffer buf, char mtype, int stop_at=?) 
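An aside on the framing the WriteBuffer declarations above implement: start_message() reserves one type byte plus a four-byte length, and end_message() backfills the length, which (per the "length-1" logic in buffer.pyx below) counts the length field itself but not the type byte. A minimal pure-Python sketch of the same framing, for illustration only (struct-based, not part of asyncpg):

    import struct

    def frame_message(msg_type: bytes, payload: bytes) -> bytes:
        # One type byte, then a big-endian int32 length that includes
        # the length field itself (4 bytes) but not the type byte.
        return msg_type + struct.pack('!i', len(payload) + 4) + payload

    # Example: a Sync ('S') message has an empty payload.
    assert frame_message(b'S', b'') == b'S\x00\x00\x00\x04'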
+ cdef bytearray consume_messages(self, char mtype) + cdef finish_message(self) + cdef inline _finish_message(self) + + @staticmethod + cdef ReadBuffer new_message_parser(object data) diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/buffer.pyx b/sbsheriff/Lib/site-packages/asyncpg/pgproto/buffer.pyx new file mode 100644 index 0000000..e05d4c7 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/buffer.pyx @@ -0,0 +1,817 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from libc.string cimport memcpy + +import collections + +class BufferError(Exception): + pass + +@cython.no_gc_clear +@cython.final +@cython.freelist(_BUFFER_FREELIST_SIZE) +cdef class WriteBuffer: + + def __cinit__(self): + self._smallbuf_inuse = True + self._buf = self._smallbuf + self._size = _BUFFER_INITIAL_SIZE + self._length = 0 + self._message_mode = 0 + + def __dealloc__(self): + if self._buf is not NULL and not self._smallbuf_inuse: + cpython.PyMem_Free(self._buf) + self._buf = NULL + self._size = 0 + + if self._view_count: + raise BufferError( + 'Deallocating buffer with attached memoryviews') + + def __getbuffer__(self, Py_buffer *buffer, int flags): + self._view_count += 1 + + cpython.PyBuffer_FillInfo( + buffer, self, self._buf, self._length, + 1, # read-only + flags) + + def __releasebuffer__(self, Py_buffer *buffer): + self._view_count -= 1 + + cdef inline _check_readonly(self): + if self._view_count: + raise BufferError('the buffer is in read-only mode') + + cdef inline _ensure_alloced(self, ssize_t extra_length): + cdef ssize_t new_size = extra_length + self._length + + if new_size > self._size: + self._reallocate(new_size) + + cdef _reallocate(self, ssize_t new_size): + cdef char *new_buf + + if new_size < _BUFFER_MAX_GROW: + new_size = _BUFFER_MAX_GROW + else: + # Add a little extra + new_size += _BUFFER_INITIAL_SIZE + + if self._smallbuf_inuse: + new_buf = <char*>cpython.PyMem_Malloc( + sizeof(char) * new_size) + if new_buf is NULL: + self._buf = NULL + self._size = 0 + self._length = 0 + raise MemoryError + memcpy(new_buf, self._buf, self._size) + self._size = new_size + self._buf = new_buf + self._smallbuf_inuse = False + else: + new_buf = <char*>cpython.PyMem_Realloc( + self._buf, new_size) + if new_buf is NULL: + cpython.PyMem_Free(self._buf) + self._buf = NULL + self._size = 0 + self._length = 0 + raise MemoryError + self._buf = new_buf + self._size = new_size + + cdef inline start_message(self, char type): + if self._length != 0: + raise BufferError( + 'cannot start_message for a non-empty buffer') + self._ensure_alloced(5) + self._message_mode = 1 + self._buf[0] = type + self._length = 5 + + cdef inline end_message(self): + # "length-1" to exclude the message type byte + cdef ssize_t mlen = self._length - 1 + + self._check_readonly() + if not self._message_mode: + raise BufferError( + 'end_message can only be called with start_message') + if self._length < 5: + raise BufferError('end_message: buffer is too small') + if mlen > _MAXINT32: + raise BufferError('end_message: message is too large') + + hton.pack_int32(&self._buf[1], mlen) + return self + + cdef inline reset(self): + self._length = 0 + self._message_mode = 0 + + cdef write_buffer(self, WriteBuffer buf): + self._check_readonly() + + if not buf._length: + return + + self._ensure_alloced(buf._length) + memcpy(self._buf + self._length, + buf._buf, + buf._length) + self._length += 
buf._length + + cdef write_byte(self, char b): + self._check_readonly() + + self._ensure_alloced(1) + self._buf[self._length] = b + self._length += 1 + + cdef write_bytes(self, bytes data): + cdef char* buf + cdef ssize_t len + + cpython.PyBytes_AsStringAndSize(data, &buf, &len) + self.write_cstr(buf, len) + + cdef write_bytestring(self, bytes string): + cdef char* buf + cdef ssize_t len + + cpython.PyBytes_AsStringAndSize(string, &buf, &len) + # PyBytes_AsStringAndSize returns a null-terminated buffer, + # but the null byte is not counted in len. hence the + 1 + self.write_cstr(buf, len + 1) + + cdef write_str(self, str string, str encoding): + self.write_bytestring(string.encode(encoding)) + + cdef write_len_prefixed_buffer(self, WriteBuffer buf): + # Write a length-prefixed (not NULL-terminated) bytes sequence. + self.write_int32(buf.len()) + self.write_buffer(buf) + + cdef write_len_prefixed_bytes(self, bytes data): + # Write a length-prefixed (not NULL-terminated) bytes sequence. + cdef: + char *buf + ssize_t size + + cpython.PyBytes_AsStringAndSize(data, &buf, &size) + if size > _MAXINT32: + raise BufferError('string is too large') + # `size` does not account for the NULL at the end. + self.write_int32(size) + self.write_cstr(buf, size) + + cdef write_frbuf(self, FRBuffer *buf): + cdef: + ssize_t buf_len = buf.len + if buf_len > 0: + self.write_cstr(frb_read_all(buf), buf_len) + + cdef write_cstr(self, const char *data, ssize_t len): + self._check_readonly() + self._ensure_alloced(len) + + memcpy(self._buf + self._length, data, len) + self._length += len + + cdef write_int16(self, int16_t i): + self._check_readonly() + self._ensure_alloced(2) + + hton.pack_int16(&self._buf[self._length], i) + self._length += 2 + + cdef write_int32(self, int32_t i): + self._check_readonly() + self._ensure_alloced(4) + + hton.pack_int32(&self._buf[self._length], i) + self._length += 4 + + cdef write_int64(self, int64_t i): + self._check_readonly() + self._ensure_alloced(8) + + hton.pack_int64(&self._buf[self._length], i) + self._length += 8 + + cdef write_float(self, float f): + self._check_readonly() + self._ensure_alloced(4) + + hton.pack_float(&self._buf[self._length], f) + self._length += 4 + + cdef write_double(self, double d): + self._check_readonly() + self._ensure_alloced(8) + + hton.pack_double(&self._buf[self._length], d) + self._length += 8 + + @staticmethod + cdef WriteBuffer new_message(char type): + cdef WriteBuffer buf + buf = WriteBuffer.__new__(WriteBuffer) + buf.start_message(type) + return buf + + @staticmethod + cdef WriteBuffer new(): + cdef WriteBuffer buf + buf = WriteBuffer.__new__(WriteBuffer) + return buf + + +@cython.no_gc_clear +@cython.final +@cython.freelist(_BUFFER_FREELIST_SIZE) +cdef class ReadBuffer: + + def __cinit__(self): + self._bufs = collections.deque() + self._bufs_append = self._bufs.append + self._bufs_popleft = self._bufs.popleft + self._bufs_len = 0 + self._buf0 = None + self._buf0_prev = None + self._pos0 = 0 + self._len0 = 0 + self._length = 0 + + self._current_message_type = 0 + self._current_message_len = 0 + self._current_message_len_unread = 0 + self._current_message_ready = 0 + + cdef feed_data(self, data): + cdef: + ssize_t dlen + bytes data_bytes + + if not cpython.PyBytes_CheckExact(data): + if cpythonx.PyByteArray_CheckExact(data): + # ProactorEventLoop in Python 3.10+ seems to be sending + # bytearray objects instead of bytes. Handle this here + # to avoid duplicating this check in every data_received(). 
+ data = bytes(data) + else: + raise BufferError( + 'feed_data: a bytes or bytearray object expected') + + # Uncomment the below code to test code paths that + # read single int/str/bytes sequences are split over + # multiple received buffers. + # + # ll = 107 + # if len(data) > ll: + # self.feed_data(data[:ll]) + # self.feed_data(data[ll:]) + # return + + data_bytes = data + + dlen = cpython.Py_SIZE(data_bytes) + if dlen == 0: + # EOF? + return + + self._bufs_append(data_bytes) + self._length += dlen + + if self._bufs_len == 0: + # First buffer + self._len0 = dlen + self._buf0 = data_bytes + + self._bufs_len += 1 + + cdef inline _ensure_first_buf(self): + if PG_DEBUG: + if self._len0 == 0: + raise BufferError('empty first buffer') + if self._length == 0: + raise BufferError('empty buffer') + + if self._pos0 == self._len0: + self._switch_to_next_buf() + + cdef _switch_to_next_buf(self): + # The first buffer is fully read, discard it + self._bufs_popleft() + self._bufs_len -= 1 + + # Shouldn't fail, since we've checked that `_length >= 1` + # in _ensure_first_buf() + self._buf0_prev = self._buf0 + self._buf0 = self._bufs[0] + + self._pos0 = 0 + self._len0 = len(self._buf0) + + if PG_DEBUG: + if self._len0 < 1: + raise BufferError( + 'debug: second buffer of ReadBuffer is empty') + + cdef inline const char* _try_read_bytes(self, ssize_t nbytes): + # Try to read *nbytes* from the first buffer. + # + # Returns pointer to data if there is at least *nbytes* + # in the buffer, NULL otherwise. + # + # Important: caller must call _ensure_first_buf() prior + # to calling try_read_bytes, and must not overread + + cdef: + const char *result + + if PG_DEBUG: + if nbytes > self._length: + return NULL + + if self._current_message_ready: + if self._current_message_len_unread < nbytes: + return NULL + + if self._pos0 + nbytes <= self._len0: + result = cpython.PyBytes_AS_STRING(self._buf0) + result += self._pos0 + self._pos0 += nbytes + self._length -= nbytes + if self._current_message_ready: + self._current_message_len_unread -= nbytes + return result + else: + return NULL + + cdef inline _read_into(self, char *buf, ssize_t nbytes): + cdef: + ssize_t nread + char *buf0 + + while True: + buf0 = cpython.PyBytes_AS_STRING(self._buf0) + + if self._pos0 + nbytes > self._len0: + nread = self._len0 - self._pos0 + memcpy(buf, buf0 + self._pos0, nread) + self._pos0 = self._len0 + self._length -= nread + nbytes -= nread + buf += nread + self._ensure_first_buf() + + else: + memcpy(buf, buf0 + self._pos0, nbytes) + self._pos0 += nbytes + self._length -= nbytes + break + + cdef inline _read_and_discard(self, ssize_t nbytes): + cdef: + ssize_t nread + + self._ensure_first_buf() + while True: + if self._pos0 + nbytes > self._len0: + nread = self._len0 - self._pos0 + self._pos0 = self._len0 + self._length -= nread + nbytes -= nread + self._ensure_first_buf() + + else: + self._pos0 += nbytes + self._length -= nbytes + break + + cdef bytes read_bytes(self, ssize_t nbytes): + cdef: + bytes result + ssize_t nread + const char *cbuf + char *buf + + self._ensure_first_buf() + cbuf = self._try_read_bytes(nbytes) + if cbuf != NULL: + return cpython.PyBytes_FromStringAndSize(cbuf, nbytes) + + if nbytes > self._length: + raise BufferError( + 'not enough data to read {} bytes'.format(nbytes)) + + if self._current_message_ready: + self._current_message_len_unread -= nbytes + if self._current_message_len_unread < 0: + raise BufferError('buffer overread') + + result = cpython.PyBytes_FromStringAndSize(NULL, nbytes) + buf = 
cpython.PyBytes_AS_STRING(result) + self._read_into(buf, nbytes) + return result + + cdef bytes read_len_prefixed_bytes(self): + cdef int32_t size = self.read_int32() + if size < 0: + raise BufferError( + 'negative length for a len-prefixed bytes value') + if size == 0: + return b'' + return self.read_bytes(size) + + cdef str read_len_prefixed_utf8(self): + cdef: + int32_t size + const char *cbuf + + size = self.read_int32() + if size < 0: + raise BufferError( + 'negative length for a len-prefixed bytes value') + + if size == 0: + return '' + + self._ensure_first_buf() + cbuf = self._try_read_bytes(size) + if cbuf != NULL: + return cpython.PyUnicode_DecodeUTF8(cbuf, size, NULL) + else: + return self.read_bytes(size).decode('utf-8') + + cdef read_uuid(self): + cdef: + bytes mem + const char *cbuf + + self._ensure_first_buf() + cbuf = self._try_read_bytes(16) + if cbuf != NULL: + return pg_uuid_from_buf(cbuf) + else: + return pg_UUID(self.read_bytes(16)) + + cdef inline char read_byte(self) except? -1: + cdef const char *first_byte + + if PG_DEBUG: + if not self._buf0: + raise BufferError( + 'debug: first buffer of ReadBuffer is empty') + + self._ensure_first_buf() + first_byte = self._try_read_bytes(1) + if first_byte is NULL: + raise BufferError('not enough data to read one byte') + + return first_byte[0] + + cdef inline int64_t read_int64(self) except? -1: + cdef: + bytes mem + const char *cbuf + + self._ensure_first_buf() + cbuf = self._try_read_bytes(8) + if cbuf != NULL: + return hton.unpack_int64(cbuf) + else: + mem = self.read_bytes(8) + return hton.unpack_int64(cpython.PyBytes_AS_STRING(mem)) + + cdef inline int32_t read_int32(self) except? -1: + cdef: + bytes mem + const char *cbuf + + self._ensure_first_buf() + cbuf = self._try_read_bytes(4) + if cbuf != NULL: + return hton.unpack_int32(cbuf) + else: + mem = self.read_bytes(4) + return hton.unpack_int32(cpython.PyBytes_AS_STRING(mem)) + + cdef inline int16_t read_int16(self) except? 
-1: + cdef: + bytes mem + const char *cbuf + + self._ensure_first_buf() + cbuf = self._try_read_bytes(2) + if cbuf != NULL: + return hton.unpack_int16(cbuf) + else: + mem = self.read_bytes(2) + return hton.unpack_int16(cpython.PyBytes_AS_STRING(mem)) + + cdef inline read_null_str(self): + if not self._current_message_ready: + raise BufferError( + 'read_null_str only works when the message guaranteed ' + 'to be in the buffer') + + cdef: + ssize_t pos + ssize_t nread + bytes result + const char *buf + const char *buf_start + + self._ensure_first_buf() + + buf_start = cpython.PyBytes_AS_STRING(self._buf0) + buf = buf_start + self._pos0 + while buf - buf_start < self._len0: + if buf[0] == 0: + pos = buf - buf_start + nread = pos - self._pos0 + buf = self._try_read_bytes(nread + 1) + if buf != NULL: + return cpython.PyBytes_FromStringAndSize(buf, nread) + else: + break + else: + buf += 1 + + result = b'' + while True: + pos = self._buf0.find(b'\x00', self._pos0) + if pos >= 0: + result += self._buf0[self._pos0 : pos] + nread = pos - self._pos0 + 1 + self._pos0 = pos + 1 + self._length -= nread + + self._current_message_len_unread -= nread + if self._current_message_len_unread < 0: + raise BufferError( + 'read_null_str: buffer overread') + + return result + + else: + result += self._buf0[self._pos0:] + nread = self._len0 - self._pos0 + self._pos0 = self._len0 + self._length -= nread + + self._current_message_len_unread -= nread + if self._current_message_len_unread < 0: + raise BufferError( + 'read_null_str: buffer overread') + + self._ensure_first_buf() + + cdef int32_t take_message(self) except -1: + cdef: + const char *cbuf + + if self._current_message_ready: + return 1 + + if self._current_message_type == 0: + if self._length < 1: + return 0 + self._ensure_first_buf() + cbuf = self._try_read_bytes(1) + if cbuf == NULL: + raise BufferError( + 'failed to read one byte on a non-empty buffer') + self._current_message_type = cbuf[0] + + if self._current_message_len == 0: + if self._length < 4: + return 0 + + self._ensure_first_buf() + cbuf = self._try_read_bytes(4) + if cbuf != NULL: + self._current_message_len = hton.unpack_int32(cbuf) + else: + self._current_message_len = self.read_int32() + + self._current_message_len_unread = self._current_message_len - 4 + + if self._length < self._current_message_len_unread: + return 0 + + self._current_message_ready = 1 + return 1 + + cdef inline int32_t take_message_type(self, char mtype) except -1: + cdef const char *buf0 + + if self._current_message_ready: + return self._current_message_type == mtype + elif self._length >= 1: + self._ensure_first_buf() + buf0 = cpython.PyBytes_AS_STRING(self._buf0) + + return buf0[self._pos0] == mtype and self.take_message() + else: + return 0 + + cdef int32_t put_message(self) except -1: + if not self._current_message_ready: + raise BufferError( + 'cannot put message: no message taken') + self._current_message_ready = False + return 0 + + cdef inline const char* try_consume_message(self, ssize_t* len): + cdef: + ssize_t buf_len + const char *buf + + if not self._current_message_ready: + return NULL + + self._ensure_first_buf() + buf_len = self._current_message_len_unread + buf = self._try_read_bytes(buf_len) + if buf != NULL: + len[0] = buf_len + self._finish_message() + return buf + + cdef discard_message(self): + if not self._current_message_ready: + raise BufferError('no message to discard') + if self._current_message_len_unread > 0: + self._read_and_discard(self._current_message_len_unread) + 
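take_message() above assembles messages incrementally: it first latches the type byte, then the int32 length, and only reports success once the whole body has been buffered, so the protocol can call it repeatedly as network chunks arrive. A rough pure-Python equivalent of that state machine, for illustration only (the real ReadBuffer keeps its data in a deque of chunks rather than a single bytearray):

    import struct

    def try_take_message(buf: bytearray):
        """Return (msg_type, payload) and consume it, or None if incomplete."""
        if len(buf) < 5:
            return None                      # need type byte + int32 length
        msg_len, = struct.unpack('!i', bytes(buf[1:5]))  # counts the length field
        if len(buf) < 1 + msg_len:
            return None                      # body not fully buffered yet
        msg_type, payload = bytes(buf[0:1]), bytes(buf[5:1 + msg_len])
        del buf[:1 + msg_len]                # consume the whole message
        return msg_type, payload

    buf = bytearray(b'S\x00\x00\x00\x04')
    assert try_take_message(buf) == (b'S', b'')
    assert try_take_message(buf) is None     # buffer is now empty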
self._current_message_len_unread = 0 + self._finish_message() + + cdef bytes consume_message(self): + if not self._current_message_ready: + raise BufferError('no message to consume') + if self._current_message_len_unread > 0: + mem = self.read_bytes(self._current_message_len_unread) + else: + mem = b'' + self._finish_message() + return mem + + cdef redirect_messages(self, WriteBuffer buf, char mtype, + int stop_at=0): + if not self._current_message_ready: + raise BufferError( + 'consume_full_messages called on a buffer without a ' + 'complete first message') + if mtype != self._current_message_type: + raise BufferError( + 'consume_full_messages called with a wrong mtype') + if self._current_message_len_unread != self._current_message_len - 4: + raise BufferError( + 'consume_full_messages called on a partially read message') + + cdef: + const char* cbuf + ssize_t cbuf_len + int32_t msg_len + ssize_t new_pos0 + ssize_t pos_delta + int32_t done + + while True: + buf.write_byte(mtype) + buf.write_int32(self._current_message_len) + + cbuf = self.try_consume_message(&cbuf_len) + if cbuf != NULL: + buf.write_cstr(cbuf, cbuf_len) + else: + buf.write_bytes(self.consume_message()) + + if self._length > 0: + self._ensure_first_buf() + else: + return + + if stop_at and buf._length >= stop_at: + return + + # Fast path: exhaust buf0 as efficiently as possible. + if self._pos0 + 5 <= self._len0: + cbuf = cpython.PyBytes_AS_STRING(self._buf0) + new_pos0 = self._pos0 + cbuf_len = self._len0 + + done = 0 + # Scan the first buffer and find the position of the + # end of the last "mtype" message. + while new_pos0 + 5 <= cbuf_len: + if (cbuf + new_pos0)[0] != mtype: + done = 1 + break + if (stop_at and + (buf._length + new_pos0 - self._pos0) > stop_at): + done = 1 + break + msg_len = hton.unpack_int32(cbuf + new_pos0 + 1) + 1 + if new_pos0 + msg_len > cbuf_len: + break + new_pos0 += msg_len + + if new_pos0 != self._pos0: + assert self._pos0 < new_pos0 <= self._len0 + + pos_delta = new_pos0 - self._pos0 + buf.write_cstr( + cbuf + self._pos0, + pos_delta) + + self._pos0 = new_pos0 + self._length -= pos_delta + + assert self._length >= 0 + + if done: + # The next message is of a different type. + return + + # Back to slow path. + if not self.take_message_type(mtype): + return + + cdef bytearray consume_messages(self, char mtype): + """Consume consecutive messages of the same type.""" + cdef: + char *buf + ssize_t nbytes + ssize_t total_bytes = 0 + bytearray result + + if not self.take_message_type(mtype): + return None + + # consume_messages is a volume-oriented method, so + # we assume that the remainder of the buffer will contain + # messages of the requested type. + result = cpythonx.PyByteArray_FromStringAndSize(NULL, self._length) + buf = cpythonx.PyByteArray_AsString(result) + + while self.take_message_type(mtype): + self._ensure_first_buf() + nbytes = self._current_message_len_unread + self._read_into(buf, nbytes) + buf += nbytes + total_bytes += nbytes + self._finish_message() + + # Clamp the result to an actual size read. + cpythonx.PyByteArray_Resize(result, total_bytes) + + return result + + cdef finish_message(self): + if self._current_message_type == 0 or not self._current_message_ready: + # The message has already been finished (e.g by consume_message()), + # or has been put back by put_message(). + return + + if self._current_message_len_unread: + if PG_DEBUG: + mtype = chr(self._current_message_type) + + discarded = self.consume_message() + + if PG_DEBUG: + print('!!! 
discarding message {!r} unread data: {!r}'.format( + mtype, + discarded)) + + self._finish_message() + + cdef inline _finish_message(self): + self._current_message_type = 0 + self._current_message_len = 0 + self._current_message_ready = 0 + self._current_message_len_unread = 0 + + @staticmethod + cdef ReadBuffer new_message_parser(object data): + cdef ReadBuffer buf + + buf = ReadBuffer.__new__(ReadBuffer) + buf.feed_data(data) + + buf._current_message_ready = 1 + buf._current_message_len_unread = buf._len0 + + return buf diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/consts.pxi b/sbsheriff/Lib/site-packages/asyncpg/pgproto/consts.pxi new file mode 100644 index 0000000..dbce085 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/consts.pxi @@ -0,0 +1,12 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +DEF _BUFFER_INITIAL_SIZE = 1024 +DEF _BUFFER_MAX_GROW = 65536 +DEF _BUFFER_FREELIST_SIZE = 256 +DEF _MAXINT32 = 2**31 - 1 +DEF _NUMERIC_DECODER_SMALLBUF_SIZE = 256 diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/cpythonx.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/cpythonx.pxd new file mode 100644 index 0000000..7b4f4f3 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/cpythonx.pxd @@ -0,0 +1,23 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from cpython cimport Py_buffer + +cdef extern from "Python.h": + int PyUnicode_1BYTE_KIND + + int PyByteArray_CheckExact(object) + int PyByteArray_Resize(object, ssize_t) except -1 + object PyByteArray_FromStringAndSize(const char *, ssize_t) + char* PyByteArray_AsString(object) + + object PyUnicode_FromString(const char *u) + const char* PyUnicode_AsUTF8AndSize( + object unicode, ssize_t *size) except NULL + + object PyUnicode_FromKindAndData( + int kind, const void *buffer, Py_ssize_t size) diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/debug.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/debug.pxd new file mode 100644 index 0000000..5e59ec1 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/debug.pxd @@ -0,0 +1,10 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef extern from "debug.h": + + cdef int PG_DEBUG diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/frb.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/frb.pxd new file mode 100644 index 0000000..1ae9561 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/frb.pxd @@ -0,0 +1,49 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef: + + struct FRBuffer: + const char* buf + ssize_t len + + inline ssize_t frb_get_len(FRBuffer *frb): + return frb.len + + inline void frb_set_len(FRBuffer *frb, ssize_t new_len): + frb.len = new_len + + inline void frb_init(FRBuffer *frb, const char *buf, ssize_t len): + frb.buf = buf + frb.len = len + + inline const char* frb_read(FRBuffer *frb, ssize_t n) except NULL: + cdef const char *result + + if n > frb.len: + frb_check(frb, n) + + result = 
frb.buf + frb.buf += n + frb.len -= n + + return result + + inline const char* frb_read_all(FRBuffer *frb): + cdef const char *result + result = frb.buf + frb.buf += frb.len + frb.len = 0 + return result + + inline FRBuffer *frb_slice_from(FRBuffer *frb, + FRBuffer* source, ssize_t len): + frb.buf = frb_read(source, len) + frb.len = len + return frb + + object frb_check(FRBuffer *frb, ssize_t n) diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/frb.pyx b/sbsheriff/Lib/site-packages/asyncpg/pgproto/frb.pyx new file mode 100644 index 0000000..f11f6b9 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/frb.pyx @@ -0,0 +1,12 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef object frb_check(FRBuffer *frb, ssize_t n): + if n > frb.len: + raise AssertionError( + f'insufficient data in buffer: requested {n} ' + f'remaining {frb.len}') diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/hton.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/hton.pxd new file mode 100644 index 0000000..9b73abc --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/hton.pxd @@ -0,0 +1,24 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from libc.stdint cimport int16_t, int32_t, uint16_t, uint32_t, int64_t, uint64_t + + +cdef extern from "./hton.h": + cdef void pack_int16(char *buf, int16_t x); + cdef void pack_int32(char *buf, int32_t x); + cdef void pack_int64(char *buf, int64_t x); + cdef void pack_float(char *buf, float f); + cdef void pack_double(char *buf, double f); + cdef int16_t unpack_int16(const char *buf); + cdef uint16_t unpack_uint16(const char *buf); + cdef int32_t unpack_int32(const char *buf); + cdef uint32_t unpack_uint32(const char *buf); + cdef int64_t unpack_int64(const char *buf); + cdef uint64_t unpack_uint64(const char *buf); + cdef float unpack_float(const char *buf); + cdef double unpack_double(const char *buf); diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.cp310-win_amd64.pyd new file mode 100644 index 0000000..e2423b7 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.pxd new file mode 100644 index 0000000..ee9ec45 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.pxd @@ -0,0 +1,19 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cimport cython +cimport cpython + +from libc.stdint cimport int16_t, int32_t, uint16_t, uint32_t, int64_t, uint64_t + + +include "./consts.pxi" +include "./frb.pxd" +include "./buffer.pxd" + + +include "./codecs/__init__.pxd" diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.pyx b/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.pyx new file mode 100644 index 0000000..b880b7e --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/pgproto.pyx @@ -0,0 +1,49 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is 
part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cimport cython +cimport cpython + +from . cimport cpythonx + +from libc.stdint cimport int8_t, uint8_t, int16_t, uint16_t, \ + int32_t, uint32_t, int64_t, uint64_t, \ + INT16_MIN, INT16_MAX, INT32_MIN, INT32_MAX, \ + UINT32_MAX, INT64_MIN, INT64_MAX, UINT64_MAX + + +from . cimport hton +from . cimport tohex + +from .debug cimport PG_DEBUG +from . import types as pgproto_types + + +include "./consts.pxi" +include "./frb.pyx" +include "./buffer.pyx" +include "./uuid.pyx" + +include "./codecs/context.pyx" + +include "./codecs/bytea.pyx" +include "./codecs/text.pyx" + +include "./codecs/datetime.pyx" +include "./codecs/float.pyx" +include "./codecs/int.pyx" +include "./codecs/json.pyx" +include "./codecs/jsonpath.pyx" +include "./codecs/uuid.pyx" +include "./codecs/numeric.pyx" +include "./codecs/bits.pyx" +include "./codecs/geometry.pyx" +include "./codecs/hstore.pyx" +include "./codecs/misc.pyx" +include "./codecs/network.pyx" +include "./codecs/tid.pyx" +include "./codecs/pg_snapshot.pyx" diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/tohex.pxd b/sbsheriff/Lib/site-packages/asyncpg/pgproto/tohex.pxd new file mode 100644 index 0000000..12fda84 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/tohex.pxd @@ -0,0 +1,10 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef extern from "./tohex.h": + cdef void uuid_to_str(const char *source, char *dest) + cdef void uuid_to_hex(const char *source, char *dest) diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/types.py b/sbsheriff/Lib/site-packages/asyncpg/pgproto/types.py new file mode 100644 index 0000000..195f0f4 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/types.py @@ -0,0 +1,421 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import builtins +import sys +import typing + +if sys.version_info >= (3, 8): + from typing import Literal, SupportsIndex +else: + from typing_extensions import Literal, SupportsIndex + + +__all__ = ( + 'BitString', 'Point', 'Path', 'Polygon', + 'Box', 'Line', 'LineSegment', 'Circle', +) + +_BitString = typing.TypeVar('_BitString', bound='BitString') +_BitOrderType = Literal['big', 'little'] + + +class BitString: + """Immutable representation of PostgreSQL `bit` and `varbit` types.""" + + __slots__ = '_bytes', '_bitlength' + + def __init__(self, + bitstring: typing.Optional[builtins.bytes] = None) -> None: + if not bitstring: + self._bytes = bytes() + self._bitlength = 0 + else: + bytelen = len(bitstring) // 8 + 1 + bytes_ = bytearray(bytelen) + byte = 0 + byte_pos = 0 + bit_pos = 0 + + for i, bit in enumerate(bitstring): + if bit == ' ': # type: ignore + continue + bit = int(bit) + if bit != 0 and bit != 1: + raise ValueError( + 'invalid bit value at position {}'.format(i)) + + byte |= bit << (8 - bit_pos - 1) + bit_pos += 1 + if bit_pos == 8: + bytes_[byte_pos] = byte + byte = 0 + byte_pos += 1 + bit_pos = 0 + + if bit_pos != 0: + bytes_[byte_pos] = byte + + bitlen = byte_pos * 8 + bit_pos + bytelen = byte_pos + (1 if bit_pos else 0) + + self._bytes = bytes(bytes_[:bytelen]) + self._bitlength = bitlen + + @classmethod + def frombytes(cls: 
typing.Type[_BitString], + bytes_: typing.Optional[builtins.bytes] = None, + bitlength: typing.Optional[int] = None) -> _BitString: + if bitlength is None: + if bytes_ is None: + bytes_ = bytes() + bitlength = 0 + else: + bitlength = len(bytes_) * 8 + else: + if bytes_ is None: + bytes_ = bytes(bitlength // 8 + 1) + bitlength = bitlength + else: + bytes_len = len(bytes_) * 8 + + if bytes_len == 0 and bitlength != 0: + raise ValueError('invalid bit length specified') + + if bytes_len != 0 and bitlength == 0: + raise ValueError('invalid bit length specified') + + if bitlength < bytes_len - 8: + raise ValueError('invalid bit length specified') + + if bitlength > bytes_len: + raise ValueError('invalid bit length specified') + + result = cls() + result._bytes = bytes_ + result._bitlength = bitlength + + return result + + @property + def bytes(self) -> builtins.bytes: + return self._bytes + + def as_string(self) -> str: + s = '' + + for i in range(self._bitlength): + s += str(self._getitem(i)) + if i % 4 == 3: + s += ' ' + + return s.strip() + + def to_int(self, bitorder: _BitOrderType = 'big', + *, signed: bool = False) -> int: + """Interpret the BitString as a Python int. + Acts similarly to int.from_bytes. + + :param bitorder: + Determines the bit order used to interpret the BitString. By + default, this function uses Postgres conventions for casting bits + to ints. If bitorder is 'big', the most significant bit is at the + start of the string (this is the same as the default). If bitorder + is 'little', the most significant bit is at the end of the string. + + :param bool signed: + Determines whether two's complement is used to interpret the + BitString. If signed is False, the returned value is always + non-negative. + + :return int: An integer representing the BitString. Information about + the BitString's exact length is lost. + + .. versionadded:: 0.18.0 + """ + x = int.from_bytes(self._bytes, byteorder='big') + x >>= -self._bitlength % 8 + if bitorder == 'big': + pass + elif bitorder == 'little': + x = int(bin(x)[:1:-1].ljust(self._bitlength, '0'), 2) + else: + raise ValueError("bitorder must be either 'big' or 'little'") + + if signed and self._bitlength > 0 and x & (1 << (self._bitlength - 1)): + x -= 1 << self._bitlength + return x + + @classmethod + def from_int(cls: typing.Type[_BitString], x: int, length: int, + bitorder: _BitOrderType = 'big', *, signed: bool = False) \ + -> _BitString: + """Represent the Python int x as a BitString. + Acts similarly to int.to_bytes. + + :param int x: + An integer to represent. Negative integers are represented in two's + complement form, unless the argument signed is False, in which case + negative integers raise an OverflowError. + + :param int length: + The length of the resulting BitString. An OverflowError is raised + if the integer is not representable in this many bits. + + :param bitorder: + Determines the bit order used in the BitString representation. By + default, this function uses Postgres conventions for casting ints + to bits. If bitorder is 'big', the most significant bit is at the + start of the string (this is the same as the default). If bitorder + is 'little', the most significant bit is at the end of the string. + + :param bool signed: + Determines whether two's complement is used in the BitString + representation. If signed is False and a negative integer is given, + an OverflowError is raised. + + :return BitString: A BitString representing the input integer, in the + form specified by the other input args. + + .. 
versionadded:: 0.18.0 + """ + # Exception types are by analogy to int.to_bytes + if length < 0: + raise ValueError("length argument must be non-negative") + elif length < x.bit_length(): + raise OverflowError("int too big to convert") + + if x < 0: + if not signed: + raise OverflowError("can't convert negative int to unsigned") + x &= (1 << length) - 1 + + if bitorder == 'big': + pass + elif bitorder == 'little': + x = int(bin(x)[:1:-1].ljust(length, '0'), 2) + else: + raise ValueError("bitorder must be either 'big' or 'little'") + + x <<= (-length % 8) + bytes_ = x.to_bytes((length + 7) // 8, byteorder='big') + return cls.frombytes(bytes_, length) + + def __repr__(self) -> str: + return '<BitString {}>'.format(self.as_string()) + + __str__ = __repr__ + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BitString): + return NotImplemented + + return (self._bytes == other._bytes and + self._bitlength == other._bitlength) + + def __hash__(self) -> int: + return hash((self._bytes, self._bitlength)) + + def _getitem(self, i: int) -> int: + byte = self._bytes[i // 8] + shift = 8 - i % 8 - 1 + return (byte >> shift) & 0x1 + + def __getitem__(self, i: int) -> int: + if isinstance(i, slice): + raise NotImplementedError('BitString does not support slices') + + if i >= self._bitlength: + raise IndexError('index out of range') + + return self._getitem(i) + + def __len__(self) -> int: + return self._bitlength + + +class Point(typing.Tuple[float, float]): + """Immutable representation of PostgreSQL `point` type.""" + + __slots__ = () + + def __new__(cls, + x: typing.Union[typing.SupportsFloat, + SupportsIndex, + typing.Text, + builtins.bytes, + builtins.bytearray], + y: typing.Union[typing.SupportsFloat, + SupportsIndex, + typing.Text, + builtins.bytes, + builtins.bytearray]) -> 'Point': + return super().__new__(cls, + typing.cast(typing.Any, (float(x), float(y)))) + + def __repr__(self) -> str: + return '{}.{}({})'.format( + type(self).__module__, + type(self).__name__, + tuple.__repr__(self) + ) + + @property + def x(self) -> float: + return self[0] + + @property + def y(self) -> float: + return self[1] + + +class Box(typing.Tuple[Point, Point]): + """Immutable representation of PostgreSQL `box` type.""" + + __slots__ = () + + def __new__(cls, high: typing.Sequence[float], + low: typing.Sequence[float]) -> 'Box': + return super().__new__(cls, + typing.cast(typing.Any, (Point(*high), + Point(*low)))) + + def __repr__(self) -> str: + return '{}.{}({})'.format( + type(self).__module__, + type(self).__name__, + tuple.__repr__(self) + ) + + @property + def high(self) -> Point: + return self[0] + + @property + def low(self) -> Point: + return self[1] + + +class Line(typing.Tuple[float, float, float]): + """Immutable representation of PostgreSQL `line` type.""" + + __slots__ = () + + def __new__(cls, A: float, B: float, C: float) -> 'Line': + return super().__new__(cls, typing.cast(typing.Any, (A, B, C))) + + @property + def A(self) -> float: + return self[0] + + @property + def B(self) -> float: + return self[1] + + @property + def C(self) -> float: + return self[2] + + +class LineSegment(typing.Tuple[Point, Point]): + """Immutable representation of PostgreSQL `lseg` type.""" + + __slots__ = () + + def __new__(cls, p1: typing.Sequence[float], + p2: typing.Sequence[float]) -> 'LineSegment': + return super().__new__(cls, + typing.cast(typing.Any, (Point(*p1), + Point(*p2)))) + + def __repr__(self) -> str: + return '{}.{}({})'.format( + type(self).__module__, + type(self).__name__, + tuple.__repr__(self) 
+ ) + + @property + def p1(self) -> Point: + return self[0] + + @property + def p2(self) -> Point: + return self[1] + + +class Path: + """Immutable representation of PostgreSQL `path` type.""" + + __slots__ = '_is_closed', 'points' + + def __init__(self, *points: typing.Sequence[float], + is_closed: bool = False) -> None: + self.points = tuple(Point(*p) for p in points) + self._is_closed = is_closed + + @property + def is_closed(self) -> bool: + return self._is_closed + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Path): + return NotImplemented + + return (self.points == other.points and + self._is_closed == other._is_closed) + + def __hash__(self) -> int: + return hash((self.points, self.is_closed)) + + def __iter__(self) -> typing.Iterator[Point]: + return iter(self.points) + + def __len__(self) -> int: + return len(self.points) + + @typing.overload + def __getitem__(self, i: int) -> Point: + ... + + @typing.overload + def __getitem__(self, i: slice) -> typing.Tuple[Point, ...]: + ... + + def __getitem__(self, i: typing.Union[int, slice]) \ + -> typing.Union[Point, typing.Tuple[Point, ...]]: + return self.points[i] + + def __contains__(self, point: object) -> bool: + return point in self.points + + +class Polygon(Path): + """Immutable representation of PostgreSQL `polygon` type.""" + + __slots__ = () + + def __init__(self, *points: typing.Sequence[float]) -> None: + # polygon is always closed + super().__init__(*points, is_closed=True) + + +class Circle(typing.Tuple[Point, float]): + """Immutable representation of PostgreSQL `circle` type.""" + + __slots__ = () + + def __new__(cls, center: Point, radius: float) -> 'Circle': + return super().__new__(cls, typing.cast(typing.Any, (center, radius))) + + @property + def center(self) -> Point: + return self[0] + + @property + def radius(self) -> float: + return self[1] diff --git a/sbsheriff/Lib/site-packages/asyncpg/pgproto/uuid.pyx b/sbsheriff/Lib/site-packages/asyncpg/pgproto/uuid.pyx new file mode 100644 index 0000000..593ea30 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pgproto/uuid.pyx @@ -0,0 +1,339 @@ +import functools +import uuid + +from libc.stdint cimport uint8_t, int8_t +from libc.string cimport memcpy, memcmp + + +# A more efficient UUID type implementation +# (6-7x faster than the standard uuid.UUID): +# +# -= Benchmark results (less is better): =- +# +# std_UUID(bytes): 1.2368 +# c_UUID(bytes): * 0.1645 (7.52x) +# object(): 0.1483 +# +# std_UUID(str): 1.8038 +# c_UUID(str): * 0.2313 (7.80x) +# +# str(std_UUID()): 1.4625 +# str(c_UUID()): * 0.2681 (5.46x) +# str(object()): 0.5975 +# +# std_UUID().bytes: 0.3508 +# c_UUID().bytes: * 0.1068 (3.28x) +# +# std_UUID().int: 0.0871 +# c_UUID().int: * 0.0856 +# +# std_UUID().hex: 0.4871 +# c_UUID().hex: * 0.1405 +# +# hash(std_UUID()): 0.3635 +# hash(c_UUID()): * 0.1564 (2.32x) +# +# dct[std_UUID()]: 0.3319 +# dct[c_UUID()]: * 0.1570 (2.11x) +# +# std_UUID() ==: 0.3478 +# c_UUID() ==: * 0.0915 (3.80x) + + +cdef char _hextable[256] +_hextable[:] = [ + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1, 0,1,2,3,4,5,6,7,8,9,-1,-1,-1,-1,-1,-1,-1,10,11,12,13,14,15,-1, + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, + -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 +] + + +cdef std_UUID = uuid.UUID + + +cdef pg_uuid_bytes_from_str(str u, char *out): + cdef: + char *orig_buf + Py_ssize_t size + unsigned char ch + uint8_t acc, part, acc_set + int i, j + + orig_buf = <char*>cpythonx.PyUnicode_AsUTF8AndSize(u, &size) + if size > 36 or size < 32: + raise ValueError( + f'invalid UUID {u!r}: ' + f'length must be between 32..36 characters, got {size}') + + acc_set = 0 + j = 0 + for i in range(size): + ch = orig_buf[i] + if ch == b'-': + continue + + part = <uint8_t>_hextable[ch] + if part == <uint8_t>-1: + if ch >= 0x20 and ch <= 0x7e: + raise ValueError( + f'invalid UUID {u!r}: unexpected character {chr(ch)!r}') + else: + raise ValueError(f'invalid UUID {u!r}: unexpected character') + + if acc_set: + acc |= part + out[j] = acc + acc_set = 0 + j += 1 + else: + acc = <uint8_t>(part << 4) + acc_set = 1 + + if j > 16 or (j == 16 and acc_set): + raise ValueError( + f'invalid UUID {u!r}: decodes to more than 16 bytes') + + if j != 16: + raise ValueError( + f'invalid UUID {u!r}: decodes to less than 16 bytes') + + +cdef class __UUIDReplaceMe: + pass + + +cdef pg_uuid_from_buf(const char *buf): + cdef: + UUID u = <UUID>UUID.__new__(UUID) + memcpy(u._data, buf, 16) + return u + + +@cython.final +@cython.no_gc_clear +cdef class UUID(__UUIDReplaceMe): + + cdef: + char _data[16] + object _int + object _hash + object __weakref__ + + def __cinit__(self): + self._int = None + self._hash = None + + def __init__(self, inp): + cdef: + char *buf + Py_ssize_t size + + if cpython.PyBytes_Check(inp): + cpython.PyBytes_AsStringAndSize(inp, &buf, &size) + if size != 16: + raise ValueError(f'16 bytes were expected, got {size}') + memcpy(self._data, buf, 16) + + elif cpython.PyUnicode_Check(inp): + pg_uuid_bytes_from_str(inp, self._data) + else: + raise TypeError(f'a bytes or str object expected, got {inp!r}') + + @property + def bytes(self): + return cpython.PyBytes_FromStringAndSize(self._data, 16) + + @property + def int(self): + if self._int is None: + # The cache is important because `self.int` can be + # used multiple times by __hash__ etc. 
+ self._int = int.from_bytes(self.bytes, 'big') + return self._int + + @property + def is_safe(self): + return uuid.SafeUUID.unknown + + def __str__(self): + cdef char out[36] + tohex.uuid_to_str(self._data, out) + return cpythonx.PyUnicode_FromKindAndData( + cpythonx.PyUnicode_1BYTE_KIND, out, 36) + + @property + def hex(self): + cdef char out[32] + tohex.uuid_to_hex(self._data, out) + return cpythonx.PyUnicode_FromKindAndData( + cpythonx.PyUnicode_1BYTE_KIND, out, 32) + + def __repr__(self): + return f"UUID('{self}')" + + def __reduce__(self): + return (type(self), (self.bytes,)) + + def __eq__(self, other): + if type(other) is UUID: + return memcmp(self._data, (<UUID>other)._data, 16) == 0 + if isinstance(other, std_UUID): + return self.int == other.int + return NotImplemented + + def __ne__(self, other): + if type(other) is UUID: + return memcmp(self._data, (<UUID>other)._data, 16) != 0 + if isinstance(other, std_UUID): + return self.int != other.int + return NotImplemented + + def __lt__(self, other): + if type(other) is UUID: + return memcmp(self._data, (<UUID>other)._data, 16) < 0 + if isinstance(other, std_UUID): + return self.int < other.int + return NotImplemented + + def __gt__(self, other): + if type(other) is UUID: + return memcmp(self._data, (<UUID>other)._data, 16) > 0 + if isinstance(other, std_UUID): + return self.int > other.int + return NotImplemented + + def __le__(self, other): + if type(other) is UUID: + return memcmp(self._data, (<UUID>other)._data, 16) <= 0 + if isinstance(other, std_UUID): + return self.int <= other.int + return NotImplemented + + def __ge__(self, other): + if type(other) is UUID: + return memcmp(self._data, (<UUID>other)._data, 16) >= 0 + if isinstance(other, std_UUID): + return self.int >= other.int + return NotImplemented + + def __hash__(self): + # In EdgeDB every schema object has a uuid and there are + # huge hash-maps of them. We want UUID.__hash__ to be + # as fast as possible. + if self._hash is not None: + return self._hash + + self._hash = hash(self.int) + return self._hash + + def __int__(self): + return self.int + + @property + def bytes_le(self): + bytes = self.bytes + return (bytes[4-1::-1] + bytes[6-1:4-1:-1] + bytes[8-1:6-1:-1] + + bytes[8:]) + + @property + def fields(self): + return (self.time_low, self.time_mid, self.time_hi_version, + self.clock_seq_hi_variant, self.clock_seq_low, self.node) + + @property + def time_low(self): + return self.int >> 96 + + @property + def time_mid(self): + return (self.int >> 80) & 0xffff + + @property + def time_hi_version(self): + return (self.int >> 64) & 0xffff + + @property + def clock_seq_hi_variant(self): + return (self.int >> 56) & 0xff + + @property + def clock_seq_low(self): + return (self.int >> 48) & 0xff + + @property + def time(self): + return (((self.time_hi_version & 0x0fff) << 48) | + (self.time_mid << 32) | self.time_low) + + @property + def clock_seq(self): + return (((self.clock_seq_hi_variant & 0x3f) << 8) | + self.clock_seq_low) + + @property + def node(self): + return self.int & 0xffffffffffff + + @property + def urn(self): + return 'urn:uuid:' + str(self) + + @property + def variant(self): + if not self.int & (0x8000 << 48): + return uuid.RESERVED_NCS + elif not self.int & (0x4000 << 48): + return uuid.RFC_4122 + elif not self.int & (0x2000 << 48): + return uuid.RESERVED_MICROSOFT + else: + return uuid.RESERVED_FUTURE + + @property + def version(self): + # The version bits are only meaningful for RFC 4122 UUIDs. 
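The comparison and hashing methods above are what make this class interoperable with the standard library: two pgproto UUIDs compare via a raw memcmp of their 16 bytes, while comparisons against uuid.UUID fall back to the cached .int value. Assuming a built asyncpg install, where this compiled class should be importable as asyncpg.pgproto.pgproto.UUID, the intended drop-in behavior looks like this sketch:

    import uuid
    from asyncpg.pgproto.pgproto import UUID  # compiled module; assumes a built install

    fast = UUID('a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11')
    slow = uuid.UUID('a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11')

    assert isinstance(fast, uuid.UUID)   # via the __bases__ patch further below
    assert fast == slow                  # cross-type equality through .int
    assert hash(fast) == hash(slow)      # both hash the integer value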
+ if self.variant == uuid.RFC_4122: + return int((self.int >> 76) & 0xf) + + +# +# In order for `isinstance(pgproto.UUID, uuid.UUID)` to work, +# patch __bases__ and __mro__ by injecting `uuid.UUID`. +# +# We apply brute-force here because the following pattern stopped +# working with Python 3.8: +# +# cdef class OurUUID: +# ... +# +# class UUID(OurUUID, uuid.UUID): +# ... +# +# With Python 3.8 it now produces +# +# "TypeError: multiple bases have instance lay-out conflict" +# +# error. Maybe it's possible to fix this some other way, but +# the best solution possible would be to just contribute our +# faster UUID to the standard library and not have this problem +# at all. For now this hack is pretty safe and should be +# compatible with future Pythons for long enough. +# +assert UUID.__bases__[0] is __UUIDReplaceMe +assert UUID.__mro__[1] is __UUIDReplaceMe +cpython.Py_INCREF(std_UUID) +cpython.PyTuple_SET_ITEM(UUID.__bases__, 0, std_UUID) +cpython.Py_INCREF(std_UUID) +cpython.PyTuple_SET_ITEM(UUID.__mro__, 1, std_UUID) +# + + +cdef pg_UUID = UUID diff --git a/sbsheriff/Lib/site-packages/asyncpg/pool.py b/sbsheriff/Lib/site-packages/asyncpg/pool.py new file mode 100644 index 0000000..9bd2a3e --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/pool.py @@ -0,0 +1,1147 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import asyncio +import functools +import inspect +import logging +import time +import warnings + +from . import compat +from . import connection +from . import connect_utils +from . import exceptions +from . import protocol + + +logger = logging.getLogger(__name__) + + +class PoolConnectionProxyMeta(type): + + def __new__(mcls, name, bases, dct, *, wrap=False): + if wrap: + for attrname in dir(connection.Connection): + if attrname.startswith('_') or attrname in dct: + continue + + meth = getattr(connection.Connection, attrname) + if not inspect.isfunction(meth): + continue + + wrapper = mcls._wrap_connection_method(attrname) + wrapper = functools.update_wrapper(wrapper, meth) + dct[attrname] = wrapper + + if '__doc__' not in dct: + dct['__doc__'] = connection.Connection.__doc__ + + return super().__new__(mcls, name, bases, dct) + + @staticmethod + def _wrap_connection_method(meth_name): + def call_con_method(self, *args, **kwargs): + # This method will be owned by PoolConnectionProxy class. + if self._con is None: + raise exceptions.InterfaceError( + 'cannot call Connection.{}(): ' + 'connection has been released back to the pool'.format( + meth_name)) + + meth = getattr(self._con.__class__, meth_name) + return meth(self._con, *args, **kwargs) + + return call_con_method + + +class PoolConnectionProxy(connection._ConnectionProxy, + metaclass=PoolConnectionProxyMeta, + wrap=True): + + __slots__ = ('_con', '_holder') + + def __init__(self, holder: 'PoolConnectionHolder', + con: connection.Connection): + self._con = con + self._holder = holder + con._set_proxy(self) + + def __getattr__(self, attr): + # Proxy all unresolved attributes to the wrapped Connection object. 
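PoolConnectionProxyMeta above generates one delegating wrapper per public Connection method at class-creation time, so proxied calls skip per-call __getattr__ dispatch and fail loudly once the connection has been released. A stripped-down sketch of the same pattern, for illustration only (Target and Proxy are hypothetical stand-in names):

    import functools
    import inspect

    class Target:                      # stands in for connection.Connection
        def ping(self):
            return 'pong'

    def _wrap(name):
        def call(self, *args, **kwargs):
            if self._obj is None:
                raise RuntimeError(f'cannot call {name}(): proxy is detached')
            return getattr(self._obj, name)(*args, **kwargs)
        return call

    class Proxy:
        def __init__(self, obj):
            self._obj = obj

    # Mirror every public Target method onto Proxy, as the metaclass does.
    for attr, meth in vars(Target).items():
        if not attr.startswith('_') and inspect.isfunction(meth):
            setattr(Proxy, attr, functools.update_wrapper(_wrap(attr), meth))

    p = Proxy(Target())
    assert p.ping() == 'pong'
    p._obj = None                      # simulate release back to the pool
    try:
        p.ping()
    except RuntimeError as e:
        assert 'detached' in str(e)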
+ return getattr(self._con, attr) + + def _detach(self) -> connection.Connection: + if self._con is None: + return + + con, self._con = self._con, None + con._set_proxy(None) + return con + + def __repr__(self): + if self._con is None: + return '<{classname} [released] {id:#x}>'.format( + classname=self.__class__.__name__, id=id(self)) + else: + return '<{classname} {con!r} {id:#x}>'.format( + classname=self.__class__.__name__, con=self._con, id=id(self)) + + +class PoolConnectionHolder: + + __slots__ = ('_con', '_pool', '_loop', '_proxy', + '_max_queries', '_setup', + '_max_inactive_time', '_in_use', + '_inactive_callback', '_timeout', + '_generation') + + def __init__(self, pool, *, max_queries, setup, max_inactive_time): + + self._pool = pool + self._con = None + self._proxy = None + + self._max_queries = max_queries + self._max_inactive_time = max_inactive_time + self._setup = setup + self._inactive_callback = None + self._in_use = None # type: asyncio.Future + self._timeout = None + self._generation = None + + def is_connected(self): + return self._con is not None and not self._con.is_closed() + + def is_idle(self): + return not self._in_use + + async def connect(self): + if self._con is not None: + raise exceptions.InternalClientError( + 'PoolConnectionHolder.connect() called while another ' + 'connection already exists') + + self._con = await self._pool._get_new_connection() + self._generation = self._pool._generation + self._maybe_cancel_inactive_callback() + self._setup_inactive_callback() + + async def acquire(self) -> PoolConnectionProxy: + if self._con is None or self._con.is_closed(): + self._con = None + await self.connect() + + elif self._generation != self._pool._generation: + # Connections have been expired, re-connect the holder. + self._pool._loop.create_task( + self._con.close(timeout=self._timeout)) + self._con = None + await self.connect() + + self._maybe_cancel_inactive_callback() + + self._proxy = proxy = PoolConnectionProxy(self, self._con) + + if self._setup is not None: + try: + await self._setup(proxy) + except (Exception, asyncio.CancelledError) as ex: + # If a user-defined `setup` function fails, we don't + # know if the connection is safe for re-use, hence + # we close it. A new connection will be created + # when `acquire` is called again. + try: + # Use `close()` to close the connection gracefully. + # An exception in `setup` isn't necessarily caused + # by an IO or a protocol error. close() will + # do the necessary cleanup via _release_on_close(). + await self._con.close() + finally: + raise ex + + self._in_use = self._pool._loop.create_future() + + return proxy + + async def release(self, timeout): + if self._in_use is None: + raise exceptions.InternalClientError( + 'PoolConnectionHolder.release() called on ' + 'a free connection holder') + + if self._con.is_closed(): + # When closing, pool connections perform the necessary + # cleanup, so we don't have to do anything else here. + return + + self._timeout = None + + if self._con._protocol.queries_count >= self._max_queries: + # The connection has reached its maximum utilization limit, + # so close it. Connection.close() will call _release(). + await self._con.close(timeout=timeout) + return + + if self._generation != self._pool._generation: + # The connection has expired because it belongs to + # an older generation (Pool.expire_connections() has + # been called.) 
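+            # Closing here retires the stale connection; the next
+            # acquire() will open a fresh one for the current generation.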
+ await self._con.close(timeout=timeout) + return + + try: + budget = timeout + + if self._con._protocol._is_cancelling(): + # If the connection is in cancellation state, + # wait for the cancellation + started = time.monotonic() + await compat.wait_for( + self._con._protocol._wait_for_cancellation(), + budget) + if budget is not None: + budget -= time.monotonic() - started + + await self._con.reset(timeout=budget) + except (Exception, asyncio.CancelledError) as ex: + # If the `reset` call failed, terminate the connection. + # A new one will be created when `acquire` is called + # again. + try: + # An exception in `reset` is most likely caused by + # an IO error, so terminate the connection. + self._con.terminate() + finally: + raise ex + + # Free this connection holder and invalidate the + # connection proxy. + self._release() + + # Rearm the connection inactivity timer. + self._setup_inactive_callback() + + async def wait_until_released(self): + if self._in_use is None: + return + else: + await self._in_use + + async def close(self): + if self._con is not None: + # Connection.close() will call _release_on_close() to + # finish holder cleanup. + await self._con.close() + + def terminate(self): + if self._con is not None: + # Connection.terminate() will call _release_on_close() to + # finish holder cleanup. + self._con.terminate() + + def _setup_inactive_callback(self): + if self._inactive_callback is not None: + raise exceptions.InternalClientError( + 'pool connection inactivity timer already exists') + + if self._max_inactive_time: + self._inactive_callback = self._pool._loop.call_later( + self._max_inactive_time, self._deactivate_inactive_connection) + + def _maybe_cancel_inactive_callback(self): + if self._inactive_callback is not None: + self._inactive_callback.cancel() + self._inactive_callback = None + + def _deactivate_inactive_connection(self): + if self._in_use is not None: + raise exceptions.InternalClientError( + 'attempting to deactivate an acquired connection') + + if self._con is not None: + # The connection is idle and not in use, so it's fine to + # use terminate() instead of close(). + self._con.terminate() + # Must call clear_connection, because _deactivate_connection + # is called when the connection is *not* checked out, and + # so terminate() above will not call the below. + self._release_on_close() + + def _release_on_close(self): + self._maybe_cancel_inactive_callback() + self._release() + self._con = None + + def _release(self): + """Release this connection holder.""" + if self._in_use is None: + # The holder is not checked out. + return + + if not self._in_use.done(): + self._in_use.set_result(None) + self._in_use = None + + # Deinitialize the connection proxy. All subsequent + # operations on it will fail. + if self._proxy is not None: + self._proxy._detach() + self._proxy = None + + # Put ourselves back to the pool queue. + self._pool._queue.put_nowait(self) + + +class Pool: + """A connection pool. + + Connection pool can be used to manage a set of connections to the database. + Connections are first acquired from the pool, then used, and then released + back to the pool. Once a connection is released, it's reset to close all + open cursors and other resources *except* prepared statements. + + Pools are created by calling :func:`~asyncpg.pool.create_pool`. 
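+
+    A minimal usage sketch (assuming a reachable PostgreSQL server and
+    valid credentials):
+
+    .. code-block:: python
+
+        pool = await asyncpg.create_pool(user='postgres')
+        async with pool.acquire() as con:
+            print(await con.fetchval('SELECT 1'))
+        await pool.close()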
+ """ + + __slots__ = ( + '_queue', '_loop', '_minsize', '_maxsize', + '_init', '_connect_args', '_connect_kwargs', + '_working_addr', '_working_config', '_working_params', + '_holders', '_initialized', '_initializing', '_closing', + '_closed', '_connection_class', '_record_class', '_generation', + '_setup', '_max_queries', '_max_inactive_connection_lifetime' + ) + + def __init__(self, *connect_args, + min_size, + max_size, + max_queries, + max_inactive_connection_lifetime, + setup, + init, + loop, + connection_class, + record_class, + **connect_kwargs): + + if len(connect_args) > 1: + warnings.warn( + "Passing multiple positional arguments to asyncpg.Pool " + "constructor is deprecated and will be removed in " + "asyncpg 0.17.0. The non-deprecated form is " + "asyncpg.Pool(, **kwargs)", + DeprecationWarning, stacklevel=2) + + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + + if max_size <= 0: + raise ValueError('max_size is expected to be greater than zero') + + if min_size < 0: + raise ValueError( + 'min_size is expected to be greater or equal to zero') + + if min_size > max_size: + raise ValueError('min_size is greater than max_size') + + if max_queries <= 0: + raise ValueError('max_queries is expected to be greater than zero') + + if max_inactive_connection_lifetime < 0: + raise ValueError( + 'max_inactive_connection_lifetime is expected to be greater ' + 'or equal to zero') + + if not issubclass(connection_class, connection.Connection): + raise TypeError( + 'connection_class is expected to be a subclass of ' + 'asyncpg.Connection, got {!r}'.format(connection_class)) + + if not issubclass(record_class, protocol.Record): + raise TypeError( + 'record_class is expected to be a subclass of ' + 'asyncpg.Record, got {!r}'.format(record_class)) + + self._minsize = min_size + self._maxsize = max_size + + self._holders = [] + self._initialized = False + self._initializing = False + self._queue = None + + self._working_addr = None + self._working_config = None + self._working_params = None + + self._connection_class = connection_class + self._record_class = record_class + + self._closing = False + self._closed = False + self._generation = 0 + self._init = init + self._connect_args = connect_args + self._connect_kwargs = connect_kwargs + + self._setup = setup + self._max_queries = max_queries + self._max_inactive_connection_lifetime = \ + max_inactive_connection_lifetime + + async def _async__init__(self): + if self._initialized: + return + if self._initializing: + raise exceptions.InterfaceError( + 'pool is being initialized in another task') + if self._closed: + raise exceptions.InterfaceError('pool is closed') + self._initializing = True + try: + await self._initialize() + return self + finally: + self._initializing = False + self._initialized = True + + async def _initialize(self): + self._queue = asyncio.LifoQueue(maxsize=self._maxsize) + for _ in range(self._maxsize): + ch = PoolConnectionHolder( + self, + max_queries=self._max_queries, + max_inactive_time=self._max_inactive_connection_lifetime, + setup=self._setup) + + self._holders.append(ch) + self._queue.put_nowait(ch) + + if self._minsize: + # Since we use a LIFO queue, the first items in the queue will be + # the last ones in `self._holders`. We want to pre-connect the + # first few connections in the queue, therefore we want to walk + # `self._holders` in reverse. 
+ + # Connect the first connection holder in the queue so that it + # can record `_working_addr` and `_working_opts`, which will + # speed up successive connection attempts. + first_ch = self._holders[-1] # type: PoolConnectionHolder + await first_ch.connect() + + if self._minsize > 1: + connect_tasks = [] + for i, ch in enumerate(reversed(self._holders[:-1])): + # `minsize - 1` because we already have first_ch + if i >= self._minsize - 1: + break + connect_tasks.append(ch.connect()) + + await asyncio.gather(*connect_tasks) + + def get_size(self): + """Return the current number of connections in this pool. + + .. versionadded:: 0.25.0 + """ + return sum(h.is_connected() for h in self._holders) + + def get_min_size(self): + """Return the minimum number of connections in this pool. + + .. versionadded:: 0.25.0 + """ + return self._minsize + + def get_max_size(self): + """Return the maximum allowed number of connections in this pool. + + .. versionadded:: 0.25.0 + """ + return self._maxsize + + def get_idle_size(self): + """Return the current number of idle connections in this pool. + + .. versionadded:: 0.25.0 + """ + return sum(h.is_connected() and h.is_idle() for h in self._holders) + + def set_connect_args(self, dsn=None, **connect_kwargs): + r"""Set the new connection arguments for this pool. + + The new connection arguments will be used for all subsequent + new connection attempts. Existing connections will remain until + they expire. Use :meth:`Pool.expire_connections() + ` to expedite the connection + expiry. + + :param str dsn: + Connection arguments specified using as a single string in + the following format: + ``postgres://user:pass@host:port/database?option=value``. + + :param \*\*connect_kwargs: + Keyword arguments for the :func:`~asyncpg.connection.connect` + function. + + .. versionadded:: 0.16.0 + """ + + self._connect_args = [dsn] + self._connect_kwargs = connect_kwargs + self._working_addr = None + self._working_config = None + self._working_params = None + + async def _get_new_connection(self): + if self._working_addr is None: + # First connection attempt on this pool. + con = await connection.connect( + *self._connect_args, + loop=self._loop, + connection_class=self._connection_class, + record_class=self._record_class, + **self._connect_kwargs) + + self._working_addr = con._addr + self._working_config = con._config + self._working_params = con._params + + else: + # We've connected before and have a resolved address, + # and parsed options and config. + con = await connect_utils._connect_addr( + loop=self._loop, + addr=self._working_addr, + timeout=self._working_params.connect_timeout, + config=self._working_config, + params=self._working_params, + connection_class=self._connection_class, + record_class=self._record_class, + ) + + if self._init is not None: + try: + await self._init(con) + except (Exception, asyncio.CancelledError) as ex: + # If a user-defined `init` function fails, we don't + # know if the connection is safe for re-use, hence + # we close it. A new connection will be created + # when `acquire` is called again. + try: + # Use `close()` to close the connection gracefully. + # An exception in `init` isn't necessarily caused + # by an IO or a protocol error. close() will + # do the necessary cleanup via _release_on_close(). + await con.close() + finally: + raise ex + + return con + + async def execute(self, query: str, *args, timeout: float=None) -> str: + """Execute an SQL command (or commands). + + Pool performs this operation using one of its connections. 
Other than + that, it behaves identically to + :meth:`Connection.execute() `. + + .. versionadded:: 0.10.0 + """ + async with self.acquire() as con: + return await con.execute(query, *args, timeout=timeout) + + async def executemany(self, command: str, args, *, timeout: float=None): + """Execute an SQL *command* for each sequence of arguments in *args*. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.executemany() + `. + + .. versionadded:: 0.10.0 + """ + async with self.acquire() as con: + return await con.executemany(command, args, timeout=timeout) + + async def fetch( + self, + query, + *args, + timeout=None, + record_class=None + ) -> list: + """Run a query and return the results as a list of :class:`Record`. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.fetch() `. + + .. versionadded:: 0.10.0 + """ + async with self.acquire() as con: + return await con.fetch( + query, + *args, + timeout=timeout, + record_class=record_class + ) + + async def fetchval(self, query, *args, column=0, timeout=None): + """Run a query and return a value in the first row. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.fetchval() + `. + + .. versionadded:: 0.10.0 + """ + async with self.acquire() as con: + return await con.fetchval( + query, *args, column=column, timeout=timeout) + + async def fetchrow(self, query, *args, timeout=None, record_class=None): + """Run a query and return the first row. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.fetchrow() `. + + .. versionadded:: 0.10.0 + """ + async with self.acquire() as con: + return await con.fetchrow( + query, + *args, + timeout=timeout, + record_class=record_class + ) + + async def copy_from_table( + self, + table_name, + *, + output, + columns=None, + schema_name=None, + timeout=None, + format=None, + oids=None, + delimiter=None, + null=None, + header=None, + quote=None, + escape=None, + force_quote=None, + encoding=None + ): + """Copy table contents to a file or file-like object. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_from_table() + `. + + .. versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_from_table( + table_name, + output=output, + columns=columns, + schema_name=schema_name, + timeout=timeout, + format=format, + oids=oids, + delimiter=delimiter, + null=null, + header=header, + quote=quote, + escape=escape, + force_quote=force_quote, + encoding=encoding + ) + + async def copy_from_query( + self, + query, + *args, + output, + timeout=None, + format=None, + oids=None, + delimiter=None, + null=None, + header=None, + quote=None, + escape=None, + force_quote=None, + encoding=None + ): + """Copy the results of a query to a file or file-like object. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_from_query() + `. + + .. 
versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_from_query( + query, + *args, + output=output, + timeout=timeout, + format=format, + oids=oids, + delimiter=delimiter, + null=null, + header=header, + quote=quote, + escape=escape, + force_quote=force_quote, + encoding=encoding + ) + + async def copy_to_table( + self, + table_name, + *, + source, + columns=None, + schema_name=None, + timeout=None, + format=None, + oids=None, + freeze=None, + delimiter=None, + null=None, + header=None, + quote=None, + escape=None, + force_quote=None, + force_not_null=None, + force_null=None, + encoding=None + ): + """Copy data to the specified table. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_to_table() + `. + + .. versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_to_table( + table_name, + source=source, + columns=columns, + schema_name=schema_name, + timeout=timeout, + format=format, + oids=oids, + freeze=freeze, + delimiter=delimiter, + null=null, + header=header, + quote=quote, + escape=escape, + force_quote=force_quote, + force_not_null=force_not_null, + force_null=force_null, + encoding=encoding + ) + + async def copy_records_to_table( + self, + table_name, + *, + records, + columns=None, + schema_name=None, + timeout=None + ): + """Copy a list of records to the specified table using binary COPY. + + Pool performs this operation using one of its connections. Other than + that, it behaves identically to + :meth:`Connection.copy_records_to_table() + `. + + .. versionadded:: 0.24.0 + """ + async with self.acquire() as con: + return await con.copy_records_to_table( + table_name, + records=records, + columns=columns, + schema_name=schema_name, + timeout=timeout + ) + + def acquire(self, *, timeout=None): + """Acquire a database connection from the pool. + + :param float timeout: A timeout for acquiring a Connection. + :return: An instance of :class:`~asyncpg.connection.Connection`. + + Can be used in an ``await`` expression or with an ``async with`` block. + + .. code-block:: python + + async with pool.acquire() as con: + await con.execute(...) + + Or: + + .. code-block:: python + + con = await pool.acquire() + try: + await con.execute(...) + finally: + await pool.release(con) + """ + return PoolAcquireContext(self, timeout) + + async def _acquire(self, timeout): + async def _acquire_impl(): + ch = await self._queue.get() # type: PoolConnectionHolder + try: + proxy = await ch.acquire() # type: PoolConnectionProxy + except (Exception, asyncio.CancelledError): + self._queue.put_nowait(ch) + raise + else: + # Record the timeout, as we will apply it by default + # in release(). + ch._timeout = timeout + return proxy + + if self._closing: + raise exceptions.InterfaceError('pool is closing') + self._check_init() + + if timeout is None: + return await _acquire_impl() + else: + return await compat.wait_for( + _acquire_impl(), timeout=timeout) + + async def release(self, connection, *, timeout=None): + """Release a database connection back to the pool. + + :param Connection connection: + A :class:`~asyncpg.connection.Connection` object to release. + :param float timeout: + A timeout for releasing the connection. If not specified, defaults + to the timeout provided in the corresponding call to the + :meth:`Pool.acquire() ` method. + + .. versionchanged:: 0.14.0 + Added the *timeout* parameter. 
+ """ + if (type(connection) is not PoolConnectionProxy or + connection._holder._pool is not self): + raise exceptions.InterfaceError( + 'Pool.release() received invalid connection: ' + '{connection!r} is not a member of this pool'.format( + connection=connection)) + + if connection._con is None: + # Already released, do nothing. + return + + self._check_init() + + # Let the connection do its internal housekeeping when its released. + connection._con._on_release() + + ch = connection._holder + if timeout is None: + timeout = ch._timeout + + # Use asyncio.shield() to guarantee that task cancellation + # does not prevent the connection from being returned to the + # pool properly. + return await asyncio.shield(ch.release(timeout)) + + async def close(self): + """Attempt to gracefully close all connections in the pool. + + Wait until all pool connections are released, close them and + shut down the pool. If any error (including cancellation) occurs + in ``close()`` the pool will terminate by calling + :meth:`Pool.terminate() `. + + It is advisable to use :func:`python:asyncio.wait_for` to set + a timeout. + + .. versionchanged:: 0.16.0 + ``close()`` now waits until all pool connections are released + before closing them and the pool. Errors raised in ``close()`` + will cause immediate pool termination. + """ + if self._closed: + return + self._check_init() + + self._closing = True + + warning_callback = None + try: + warning_callback = self._loop.call_later( + 60, self._warn_on_long_close) + + release_coros = [ + ch.wait_until_released() for ch in self._holders] + await asyncio.gather(*release_coros) + + close_coros = [ + ch.close() for ch in self._holders] + await asyncio.gather(*close_coros) + + except (Exception, asyncio.CancelledError): + self.terminate() + raise + + finally: + if warning_callback is not None: + warning_callback.cancel() + self._closed = True + self._closing = False + + def _warn_on_long_close(self): + logger.warning('Pool.close() is taking over 60 seconds to complete. ' + 'Check if you have any unreleased connections left. ' + 'Use asyncio.wait_for() to set a timeout for ' + 'Pool.close().') + + def terminate(self): + """Terminate all connections in the pool.""" + if self._closed: + return + self._check_init() + for ch in self._holders: + ch.terminate() + self._closed = True + + async def expire_connections(self): + """Expire all currently open connections. + + Cause all currently open connections to get replaced on the + next :meth:`~asyncpg.pool.Pool.acquire()` call. + + .. versionadded:: 0.16.0 + """ + self._generation += 1 + + def _check_init(self): + if not self._initialized: + if self._initializing: + raise exceptions.InterfaceError( + 'pool is being initialized, but not yet ready: ' + 'likely there is a race between creating a pool and ' + 'using it') + raise exceptions.InterfaceError('pool is not initialized') + if self._closed: + raise exceptions.InterfaceError('pool is closed') + + def _drop_statement_cache(self): + # Drop statement cache for all connections in the pool. + for ch in self._holders: + if ch._con is not None: + ch._con._drop_local_statement_cache() + + def _drop_type_cache(self): + # Drop type codec cache for all connections in the pool. 
+ for ch in self._holders: + if ch._con is not None: + ch._con._drop_local_type_cache() + + def __await__(self): + return self._async__init__().__await__() + + async def __aenter__(self): + await self._async__init__() + return self + + async def __aexit__(self, *exc): + await self.close() + + +class PoolAcquireContext: + + __slots__ = ('timeout', 'connection', 'done', 'pool') + + def __init__(self, pool, timeout): + self.pool = pool + self.timeout = timeout + self.connection = None + self.done = False + + async def __aenter__(self): + if self.connection is not None or self.done: + raise exceptions.InterfaceError('a connection is already acquired') + self.connection = await self.pool._acquire(self.timeout) + return self.connection + + async def __aexit__(self, *exc): + self.done = True + con = self.connection + self.connection = None + await self.pool.release(con) + + def __await__(self): + self.done = True + return self.pool._acquire(self.timeout).__await__() + + +def create_pool(dsn=None, *, + min_size=10, + max_size=10, + max_queries=50000, + max_inactive_connection_lifetime=300.0, + setup=None, + init=None, + loop=None, + connection_class=connection.Connection, + record_class=protocol.Record, + **connect_kwargs): + r"""Create a connection pool. + + Can be used either with an ``async with`` block: + + .. code-block:: python + + async with asyncpg.create_pool(user='postgres', + command_timeout=60) as pool: + await pool.fetch('SELECT 1') + + Or to perform multiple operations on a single connection: + + .. code-block:: python + + async with asyncpg.create_pool(user='postgres', + command_timeout=60) as pool: + async with pool.acquire() as con: + await con.execute(''' + CREATE TABLE names ( + id serial PRIMARY KEY, + name VARCHAR (255) NOT NULL) + ''') + await con.fetch('SELECT 1') + + Or directly with ``await`` (not recommended): + + .. code-block:: python + + pool = await asyncpg.create_pool(user='postgres', command_timeout=60) + con = await pool.acquire() + try: + await con.fetch('SELECT 1') + finally: + await pool.release(con) + + .. warning:: + Prepared statements and cursors returned by + :meth:`Connection.prepare() ` + and :meth:`Connection.cursor() ` + become invalid once the connection is released. Likewise, all + notification and log listeners are removed, and ``asyncpg`` will + issue a warning if there are any listener callbacks registered on a + connection that is being released to the pool. + + :param str dsn: + Connection arguments specified using as a single string in + the following format: + ``postgres://user:pass@host:port/database?option=value``. + + :param \*\*connect_kwargs: + Keyword arguments for the :func:`~asyncpg.connection.connect` + function. + + :param Connection connection_class: + The class to use for connections. Must be a subclass of + :class:`~asyncpg.connection.Connection`. + + :param type record_class: + If specified, the class to use for records returned by queries on + the connections in this pool. Must be a subclass of + :class:`~asyncpg.Record`. + + :param int min_size: + Number of connection the pool will be initialized with. + + :param int max_size: + Max number of connections in the pool. + + :param int max_queries: + Number of queries after a connection is closed and replaced + with a new connection. + + :param float max_inactive_connection_lifetime: + Number of seconds after which inactive connections in the + pool will be closed. Pass ``0`` to disable this mechanism. 
+ + :param coroutine setup: + A coroutine to prepare a connection right before it is returned + from :meth:`Pool.acquire() `. An example use + case would be to automatically set up notifications listeners for + all connections of a pool. + + :param coroutine init: + A coroutine to initialize a connection when it is created. + An example use case would be to setup type codecs with + :meth:`Connection.set_builtin_type_codec() <\ + asyncpg.connection.Connection.set_builtin_type_codec>` + or :meth:`Connection.set_type_codec() <\ + asyncpg.connection.Connection.set_type_codec>`. + + :param loop: + An asyncio event loop instance. If ``None``, the default + event loop will be used. + + :return: An instance of :class:`~asyncpg.pool.Pool`. + + .. versionchanged:: 0.10.0 + An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any + attempted operation on a released connection. + + .. versionchanged:: 0.13.0 + An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any + attempted operation on a prepared statement or a cursor created + on a connection that has been released to the pool. + + .. versionchanged:: 0.13.0 + An :exc:`~asyncpg.exceptions.InterfaceWarning` will be produced + if there are any active listeners (added via + :meth:`Connection.add_listener() + ` + or :meth:`Connection.add_log_listener() + `) present on the + connection at the moment of its release to the pool. + + .. versionchanged:: 0.22.0 + Added the *record_class* parameter. + """ + return Pool( + dsn, + connection_class=connection_class, + record_class=record_class, + min_size=min_size, max_size=max_size, + max_queries=max_queries, loop=loop, setup=setup, init=init, + max_inactive_connection_lifetime=max_inactive_connection_lifetime, + **connect_kwargs) diff --git a/sbsheriff/Lib/site-packages/asyncpg/prepared_stmt.py b/sbsheriff/Lib/site-packages/asyncpg/prepared_stmt.py new file mode 100644 index 0000000..8e241d6 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/prepared_stmt.py @@ -0,0 +1,259 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import json + +from . import connresource +from . import cursor +from . import exceptions + + +class PreparedStatement(connresource.ConnectionResource): + """A representation of a prepared statement.""" + + __slots__ = ('_state', '_query', '_last_status') + + def __init__(self, connection, query, state): + super().__init__(connection) + self._state = state + self._query = query + state.attach() + self._last_status = None + + @connresource.guarded + def get_name(self) -> str: + """Return the name of this prepared statement. + + .. versionadded:: 0.25.0 + """ + return self._state.name + + @connresource.guarded + def get_query(self) -> str: + """Return the text of the query for this prepared statement. + + Example:: + + stmt = await connection.prepare('SELECT $1::int') + assert stmt.get_query() == "SELECT $1::int" + """ + return self._query + + @connresource.guarded + def get_statusmsg(self) -> str: + """Return the status of the executed command. + + Example:: + + stmt = await connection.prepare('CREATE TABLE mytab (a int)') + await stmt.fetch() + assert stmt.get_statusmsg() == "CREATE TABLE" + """ + if self._last_status is None: + return self._last_status + return self._last_status.decode() + + @connresource.guarded + def get_parameters(self): + """Return a description of statement parameters types. 
+
+        :return: A tuple of :class:`asyncpg.types.Type`.
+
+        Example::
+
+            stmt = await connection.prepare('SELECT ($1::int, $2::text)')
+            print(stmt.get_parameters())
+
+            # Will print:
+            #   (Type(oid=23, name='int4', kind='scalar', schema='pg_catalog'),
+            #    Type(oid=25, name='text', kind='scalar', schema='pg_catalog'))
+        """
+        return self._state._get_parameters()
+
+    @connresource.guarded
+    def get_attributes(self):
+        """Return a description of relation attributes (columns).
+
+        :return: A tuple of :class:`asyncpg.types.Attribute`.
+
+        Example::
+
+            st = await self.con.prepare('''
+                SELECT typname, typnamespace FROM pg_type
+            ''')
+            print(st.get_attributes())
+
+            # Will print:
+            #   (Attribute(
+            #       name='typname',
+            #       type=Type(oid=19, name='name', kind='scalar',
+            #                 schema='pg_catalog')),
+            #    Attribute(
+            #       name='typnamespace',
+            #       type=Type(oid=26, name='oid', kind='scalar',
+            #                 schema='pg_catalog')))
+        """
+        return self._state._get_attributes()
+
+    @connresource.guarded
+    def cursor(self, *args, prefetch=None,
+               timeout=None) -> cursor.CursorFactory:
+        """Return a *cursor factory* for the prepared statement.
+
+        :param args: Query arguments.
+        :param int prefetch: The number of rows the *cursor iterator*
+                             will prefetch (defaults to ``50``.)
+        :param float timeout: Optional timeout in seconds.
+
+        :return: A :class:`~cursor.CursorFactory` object.
+        """
+        return cursor.CursorFactory(
+            self._connection,
+            self._query,
+            self._state,
+            args,
+            prefetch,
+            timeout,
+            self._state.record_class,
+        )
+
+    @connresource.guarded
+    async def explain(self, *args, analyze=False):
+        """Return the execution plan of the statement.
+
+        :param args: Query arguments.
+        :param analyze: If ``True``, the statement will be executed and
+                        the run time statistics added to the return value.
+
+        :return: An object representing the execution plan. This value
+                 is actually a deserialized JSON output of the SQL
+                 ``EXPLAIN`` command.
+        """
+        query = 'EXPLAIN (FORMAT JSON, VERBOSE'
+        if analyze:
+            query += ', ANALYZE) '
+        else:
+            query += ') '
+        query += self._state.query
+
+        if analyze:
+            # From PostgreSQL docs:
+            # Important: Keep in mind that the statement is actually
+            # executed when the ANALYZE option is used. Although EXPLAIN
+            # will discard any output that a SELECT would return, other
+            # side effects of the statement will happen as usual. If you
+            # wish to use EXPLAIN ANALYZE on an INSERT, UPDATE, DELETE,
+            # CREATE TABLE AS, or EXECUTE statement without letting the
+            # command affect your data, use this approach:
+            #     BEGIN;
+            #     EXPLAIN ANALYZE ...;
+            #     ROLLBACK;
+            tr = self._connection.transaction()
+            await tr.start()
+            try:
+                data = await self._connection.fetchval(query, *args)
+            finally:
+                await tr.rollback()
+        else:
+            data = await self._connection.fetchval(query, *args)
+
+        return json.loads(data)
+
+    @connresource.guarded
+    async def fetch(self, *args, timeout=None):
+        r"""Execute the statement and return a list of :class:`Record` objects.
+
+        :param str query: Query text
+        :param args: Query arguments
+        :param float timeout: Optional timeout value in seconds.
+
+        :return: A list of :class:`Record` instances.
+        """
+        data = await self.__bind_execute(args, 0, timeout)
+        return data
+
+    @connresource.guarded
+    async def fetchval(self, *args, column=0, timeout=None):
+        """Execute the statement and return a value in the first row.
+
+        :param args: Query arguments.
+        :param int column: Numeric index within the record of the value to
+                           return (defaults to 0).
+ :param float timeout: Optional timeout value in seconds. + If not specified, defaults to the value of + ``command_timeout`` argument to the ``Connection`` + instance constructor. + + :return: The value of the specified column of the first record. + """ + data = await self.__bind_execute(args, 1, timeout) + if not data: + return None + return data[0][column] + + @connresource.guarded + async def fetchrow(self, *args, timeout=None): + """Execute the statement and return the first row. + + :param str query: Query text + :param args: Query arguments + :param float timeout: Optional timeout value in seconds. + + :return: The first row as a :class:`Record` instance. + """ + data = await self.__bind_execute(args, 1, timeout) + if not data: + return None + return data[0] + + @connresource.guarded + async def executemany(self, args, *, timeout: float=None): + """Execute the statement for each sequence of arguments in *args*. + + :param args: An iterable containing sequences of arguments. + :param float timeout: Optional timeout value in seconds. + :return None: This method discards the results of the operations. + + .. versionadded:: 0.22.0 + """ + return await self.__do_execute( + lambda protocol: protocol.bind_execute_many( + self._state, args, '', timeout)) + + async def __do_execute(self, executor): + protocol = self._connection._protocol + try: + return await executor(protocol) + except exceptions.OutdatedSchemaCacheError: + await self._connection.reload_schema_state() + # We can not find all manually created prepared statements, so just + # drop known cached ones in the `self._connection`. + # Other manually created prepared statements will fail and + # invalidate themselves (unfortunately, clearing caches again). + self._state.mark_closed() + raise + + async def __bind_execute(self, args, limit, timeout): + data, status, _ = await self.__do_execute( + lambda protocol: protocol.bind_execute( + self._state, args, '', limit, True, timeout)) + self._last_status = status + return data + + def _check_open(self, meth_name): + if self._state.closed: + raise exceptions.InterfaceError( + 'cannot call PreparedStmt.{}(): ' + 'the prepared statement is closed'.format(meth_name)) + + def _check_conn_validity(self, meth_name): + self._check_open(meth_name) + super()._check_conn_validity(meth_name) + + def __del__(self): + self._state.detach() + self._connection._maybe_gc_stmt(self._state) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/__init__.py b/sbsheriff/Lib/site-packages/asyncpg/protocol/__init__.py new file mode 100644 index 0000000..8b3e06a --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/__init__.py @@ -0,0 +1,9 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + +# flake8: NOQA + +from .protocol import Protocol, Record, NO_TIMEOUT, BUILTIN_TYPE_NAME_MAP diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/protocol/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..2c48140 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/protocol/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/__init__.py b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..45f7c28 Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/array.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/array.pyx new file mode 100644 index 0000000..f8f9b8d --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/array.pyx @@ -0,0 +1,875 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from collections.abc import (Iterable as IterableABC, + Mapping as MappingABC, + Sized as SizedABC) + +from asyncpg import exceptions + + +DEF ARRAY_MAXDIM = 6 # defined in postgresql/src/includes/c.h + +# "NULL" +cdef Py_UCS4 *APG_NULL = [0x004E, 0x0055, 0x004C, 0x004C, 0x0000] + + +ctypedef object (*encode_func_ex)(ConnectionSettings settings, + WriteBuffer buf, + object obj, + const void *arg) + + +ctypedef object (*decode_func_ex)(ConnectionSettings settings, + FRBuffer *buf, + const void *arg) + + +cdef inline bint _is_trivial_container(object obj): + return cpython.PyUnicode_Check(obj) or cpython.PyBytes_Check(obj) or \ + cpythonx.PyByteArray_Check(obj) or cpythonx.PyMemoryView_Check(obj) + + +cdef inline _is_array_iterable(object obj): + return ( + isinstance(obj, IterableABC) and + isinstance(obj, SizedABC) and + not _is_trivial_container(obj) and + not isinstance(obj, MappingABC) + ) + + +cdef inline _is_sub_array_iterable(object obj): + # Sub-arrays have a specialized check, because we treat + # nested tuples as records. + return _is_array_iterable(obj) and not cpython.PyTuple_Check(obj) + + +cdef _get_array_shape(object obj, int32_t *dims, int32_t *ndims): + cdef: + ssize_t mylen = len(obj) + ssize_t elemlen = -2 + object it + + if mylen > _MAXINT32: + raise ValueError('too many elements in array value') + + if ndims[0] > ARRAY_MAXDIM: + raise ValueError( + 'number of array dimensions ({}) exceed the maximum expected ({})'. 
+ format(ndims[0], ARRAY_MAXDIM)) + + dims[ndims[0] - 1] = mylen + + for elem in obj: + if _is_sub_array_iterable(elem): + if elemlen == -2: + elemlen = len(elem) + if elemlen > _MAXINT32: + raise ValueError('too many elements in array value') + ndims[0] += 1 + _get_array_shape(elem, dims, ndims) + else: + if len(elem) != elemlen: + raise ValueError('non-homogeneous array') + else: + if elemlen >= 0: + raise ValueError('non-homogeneous array') + else: + elemlen = -1 + + +cdef _write_array_data(ConnectionSettings settings, object obj, int32_t ndims, + int32_t dim, WriteBuffer elem_data, + encode_func_ex encoder, const void *encoder_arg): + if dim < ndims - 1: + for item in obj: + _write_array_data(settings, item, ndims, dim + 1, elem_data, + encoder, encoder_arg) + else: + for item in obj: + if item is None: + elem_data.write_int32(-1) + else: + try: + encoder(settings, elem_data, item, encoder_arg) + except TypeError as e: + raise ValueError( + 'invalid array element: {}'.format(e.args[0])) from None + + +cdef inline array_encode(ConnectionSettings settings, WriteBuffer buf, + object obj, uint32_t elem_oid, + encode_func_ex encoder, const void *encoder_arg): + cdef: + WriteBuffer elem_data + int32_t dims[ARRAY_MAXDIM] + int32_t ndims = 1 + int32_t i + + if not _is_array_iterable(obj): + raise TypeError( + 'a sized iterable container expected (got type {!r})'.format( + type(obj).__name__)) + + _get_array_shape(obj, dims, &ndims) + + elem_data = WriteBuffer.new() + + if ndims > 1: + _write_array_data(settings, obj, ndims, 0, elem_data, + encoder, encoder_arg) + else: + for i, item in enumerate(obj): + if item is None: + elem_data.write_int32(-1) + else: + try: + encoder(settings, elem_data, item, encoder_arg) + except TypeError as e: + raise ValueError( + 'invalid array element at index {}: {}'.format( + i, e.args[0])) from None + + buf.write_int32(12 + 8 * ndims + elem_data.len()) + # Number of dimensions + buf.write_int32(ndims) + # flags + buf.write_int32(0) + # element type + buf.write_int32(elem_oid) + # upper / lower bounds + for i in range(ndims): + buf.write_int32(dims[i]) + buf.write_int32(1) + # element data + buf.write_buffer(elem_data) + + +cdef _write_textarray_data(ConnectionSettings settings, object obj, + int32_t ndims, int32_t dim, WriteBuffer array_data, + encode_func_ex encoder, const void *encoder_arg, + Py_UCS4 typdelim): + cdef: + ssize_t i = 0 + int8_t delim = typdelim + WriteBuffer elem_data + Py_buffer pybuf + const char *elem_str + char ch + ssize_t elem_len + ssize_t quoted_elem_len + bint need_quoting + + array_data.write_byte(b'{') + + if dim < ndims - 1: + for item in obj: + if i > 0: + array_data.write_byte(delim) + array_data.write_byte(b' ') + _write_textarray_data(settings, item, ndims, dim + 1, array_data, + encoder, encoder_arg, typdelim) + i += 1 + else: + for item in obj: + elem_data = WriteBuffer.new() + + if i > 0: + array_data.write_byte(delim) + array_data.write_byte(b' ') + + if item is None: + array_data.write_bytes(b'NULL') + i += 1 + continue + else: + try: + encoder(settings, elem_data, item, encoder_arg) + except TypeError as e: + raise ValueError( + 'invalid array element: {}'.format( + e.args[0])) from None + + # element string length (first four bytes are the encoded length.) 
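+            # (the encoder wrote an int32 length prefix into elem_data;
+            # it is skipped below to get at the raw element text)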
+ elem_len = elem_data.len() - 4 + + if elem_len == 0: + # Empty string + array_data.write_bytes(b'""') + else: + cpython.PyObject_GetBuffer( + elem_data, &pybuf, cpython.PyBUF_SIMPLE) + + elem_str = (pybuf.buf) + 4 + + try: + if not apg_strcasecmp_char(elem_str, b'NULL'): + array_data.write_byte(b'"') + array_data.write_cstr(elem_str, 4) + array_data.write_byte(b'"') + else: + quoted_elem_len = elem_len + need_quoting = False + + for i in range(elem_len): + ch = elem_str[i] + if ch == b'"' or ch == b'\\': + # Quotes and backslashes need escaping. + quoted_elem_len += 1 + need_quoting = True + elif (ch == b'{' or ch == b'}' or ch == delim or + apg_ascii_isspace(ch)): + need_quoting = True + + if need_quoting: + array_data.write_byte(b'"') + + if quoted_elem_len == elem_len: + array_data.write_cstr(elem_str, elem_len) + else: + # Escaping required. + for i in range(elem_len): + ch = elem_str[i] + if ch == b'"' or ch == b'\\': + array_data.write_byte(b'\\') + array_data.write_byte(ch) + + array_data.write_byte(b'"') + else: + array_data.write_cstr(elem_str, elem_len) + finally: + cpython.PyBuffer_Release(&pybuf) + + i += 1 + + array_data.write_byte(b'}') + + +cdef inline textarray_encode(ConnectionSettings settings, WriteBuffer buf, + object obj, encode_func_ex encoder, + const void *encoder_arg, Py_UCS4 typdelim): + cdef: + WriteBuffer array_data + int32_t dims[ARRAY_MAXDIM] + int32_t ndims = 1 + int32_t i + + if not _is_array_iterable(obj): + raise TypeError( + 'a sized iterable container expected (got type {!r})'.format( + type(obj).__name__)) + + _get_array_shape(obj, dims, &ndims) + + array_data = WriteBuffer.new() + _write_textarray_data(settings, obj, ndims, 0, array_data, + encoder, encoder_arg, typdelim) + buf.write_int32(array_data.len()) + buf.write_buffer(array_data) + + +cdef inline array_decode(ConnectionSettings settings, FRBuffer *buf, + decode_func_ex decoder, const void *decoder_arg): + cdef: + int32_t ndims = hton.unpack_int32(frb_read(buf, 4)) + int32_t flags = hton.unpack_int32(frb_read(buf, 4)) + uint32_t elem_oid = hton.unpack_int32(frb_read(buf, 4)) + list result + int i + int32_t elem_len + int32_t elem_count = 1 + FRBuffer elem_buf + int32_t dims[ARRAY_MAXDIM] + Codec elem_codec + + if ndims == 0: + return [] + + if ndims > ARRAY_MAXDIM: + raise exceptions.ProtocolError( + 'number of array dimensions ({}) exceed the maximum expected ({})'. 
+            format(ndims, ARRAY_MAXDIM))
+    elif ndims < 0:
+        raise exceptions.ProtocolError(
+            'unexpected array dimensions value: {}'.format(ndims))
+
+    for i in range(ndims):
+        dims[i] = hton.unpack_int32(frb_read(buf, 4))
+        if dims[i] < 0:
+            raise exceptions.ProtocolError(
+                'unexpected array dimension size: {}'.format(dims[i]))
+        # Ignore the lower bound information
+        frb_read(buf, 4)
+
+    if ndims == 1:
+        # Fast path for flat arrays
+        elem_count = dims[0]
+        result = cpython.PyList_New(elem_count)
+
+        for i in range(elem_count):
+            elem_len = hton.unpack_int32(frb_read(buf, 4))
+            if elem_len == -1:
+                elem = None
+            else:
+                frb_slice_from(&elem_buf, buf, elem_len)
+                elem = decoder(settings, &elem_buf, decoder_arg)
+
+            cpython.Py_INCREF(elem)
+            cpython.PyList_SET_ITEM(result, i, elem)
+
+    else:
+        result = _nested_array_decode(settings, buf,
+                                      decoder, decoder_arg, ndims, dims,
+                                      &elem_buf)
+
+    return result
+
+
+cdef _nested_array_decode(ConnectionSettings settings,
+                          FRBuffer *buf,
+                          decode_func_ex decoder,
+                          const void *decoder_arg,
+                          int32_t ndims, int32_t *dims,
+                          FRBuffer *elem_buf):
+
+    cdef:
+        int32_t elem_len
+        int64_t i, j
+        int64_t array_len = 1
+        object elem, stride
+        # An array of pointers to lists for each current array level.
+        void *strides[ARRAY_MAXDIM]
+        # An array of current positions at each array level.
+        int32_t indexes[ARRAY_MAXDIM]
+
+    for i in range(ndims):
+        array_len *= dims[i]
+        indexes[i] = 0
+        strides[i] = NULL
+
+    if array_len == 0:
+        # A multidimensional array with a zero-sized dimension?
+        return []
+
+    elif array_len < 0:
+        # Array length overflow
+        raise exceptions.ProtocolError('array length overflow')
+
+    for i in range(array_len):
+        # Decode the element.
+        elem_len = hton.unpack_int32(frb_read(buf, 4))
+        if elem_len == -1:
+            elem = None
+        else:
+            elem = decoder(settings,
+                           frb_slice_from(elem_buf, buf, elem_len),
+                           decoder_arg)
+
+        # Take an explicit reference, as PyList_SET_ITEM in the
+        # below loop steals one.
+        cpython.Py_INCREF(elem)
+
+        # Iterate over array dimensions and put the element in
+        # the correctly nested sublist.
+        for j in reversed(range(ndims)):
+            if indexes[j] == 0:
+                # Allocate the list for this array level.
+                stride = cpython.PyList_New(dims[j])
+
+                strides[j] = <void*>stride
+                # Take an explicit reference, as PyList_SET_ITEM
+                # below steals one.
+                cpython.Py_INCREF(stride)
+
+            stride = <object>strides[j]
+            cpython.PyList_SET_ITEM(stride, indexes[j], elem)
+            indexes[j] += 1
+
+            if indexes[j] == dims[j] and j != 0:
+                # This array level is full, continue the
+                # ascent in the dimensions so that this level
+                # sublist will be appended to the parent list.
+                elem = stride
+                # Reset the index, this will cause the
+                # new list to be allocated on the next
+                # iteration on this array axis.
+                indexes[j] = 0
+            else:
+                break
+
+    stride = <object>strides[0]
+    # Since each element in strides has a refcount of 1,
+    # returning strides[0] will increment it to 2, so
+    # balance that.
+    cpython.Py_DECREF(stride)
+    return stride
+
+
+cdef textarray_decode(ConnectionSettings settings, FRBuffer *buf,
+                      decode_func_ex decoder, const void *decoder_arg,
+                      Py_UCS4 typdelim):
+    cdef:
+        Py_UCS4 *array_text
+        str s
+
+    # Make a copy of array data since we will be mutating it for
+    # the purposes of element decoding.
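+    # PyUnicode_AsUCS4Copy allocates a fresh UCS4 buffer; the matching
+    # PyMem_Free happens in the finally clause below.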
+ s = pgproto.text_decode(settings, buf) + array_text = cpythonx.PyUnicode_AsUCS4Copy(s) + + try: + return _textarray_decode( + settings, array_text, decoder, decoder_arg, typdelim) + except ValueError as e: + raise exceptions.ProtocolError( + 'malformed array literal {!r}: {}'.format(s, e.args[0])) + finally: + cpython.PyMem_Free(array_text) + + +cdef _textarray_decode(ConnectionSettings settings, + Py_UCS4 *array_text, + decode_func_ex decoder, + const void *decoder_arg, + Py_UCS4 typdelim): + + cdef: + bytearray array_bytes + list result + list new_stride + Py_UCS4 *ptr + int32_t ndims = 0 + int32_t ubound = 0 + int32_t lbound = 0 + int32_t dims[ARRAY_MAXDIM] + int32_t inferred_dims[ARRAY_MAXDIM] + int32_t inferred_ndims = 0 + void *strides[ARRAY_MAXDIM] + int32_t indexes[ARRAY_MAXDIM] + int32_t nest_level = 0 + int32_t item_level = 0 + bint end_of_array = False + + bint end_of_item = False + bint has_quoting = False + bint strip_spaces = False + bint in_quotes = False + Py_UCS4 *item_start + Py_UCS4 *item_ptr + Py_UCS4 *item_end + + int i + object item + str item_text + FRBuffer item_buf + char *pg_item_str + ssize_t pg_item_len + + ptr = array_text + + while True: + while apg_ascii_isspace(ptr[0]): + ptr += 1 + + if ptr[0] != '[': + # Finished parsing dimensions spec. + break + + ptr += 1 # '[' + + if ndims > ARRAY_MAXDIM: + raise ValueError( + 'number of array dimensions ({}) exceed the ' + 'maximum expected ({})'.format(ndims, ARRAY_MAXDIM)) + + ptr = apg_parse_int32(ptr, &ubound) + if ptr == NULL: + raise ValueError('missing array dimension value') + + if ptr[0] == ':': + ptr += 1 + lbound = ubound + + # [lower:upper] spec. We disregard the lbound for decoding. + ptr = apg_parse_int32(ptr, &ubound) + if ptr == NULL: + raise ValueError('missing array dimension value') + else: + lbound = 1 + + if ptr[0] != ']': + raise ValueError('missing \']\' after array dimensions') + + ptr += 1 # ']' + + dims[ndims] = ubound - lbound + 1 + ndims += 1 + + if ndims != 0: + # If dimensions were given, the '=' token is expected. + if ptr[0] != '=': + raise ValueError('missing \'=\' after array dimensions') + + ptr += 1 # '=' + + # Skip any whitespace after the '=', whitespace + # before was consumed in the above loop. + while apg_ascii_isspace(ptr[0]): + ptr += 1 + + # Infer the dimensions from the brace structure in the + # array literal body, and check that it matches the explicit + # spec. This also validates that the array literal is sane. + _infer_array_dims(ptr, typdelim, inferred_dims, &inferred_ndims) + + if inferred_ndims != ndims: + raise ValueError( + 'specified array dimensions do not match array content') + + for i in range(ndims): + if inferred_dims[i] != dims[i]: + raise ValueError( + 'specified array dimensions do not match array content') + else: + # Infer the dimensions from the brace structure in the array literal + # body. This also validates that the array literal is sane. + _infer_array_dims(ptr, typdelim, dims, &ndims) + + while not end_of_array: + # We iterate over the literal character by character + # and modify the string in-place removing the array-specific + # quoting and determining the boundaries of each element. + end_of_item = has_quoting = in_quotes = False + strip_spaces = True + + # Pointers to array element start, end, and the current pointer + # tracking the position where characters are written when + # escaping is folded. 
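+        # Unescaping only ever shrinks the text, so item_ptr can safely
+        # write in place over characters that ptr has already consumed.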
+ item_start = item_end = item_ptr = ptr + item_level = 0 + + while not end_of_item: + if ptr[0] == '"': + in_quotes = not in_quotes + if in_quotes: + strip_spaces = False + else: + item_end = item_ptr + has_quoting = True + + elif ptr[0] == '\\': + # Quoted character, collapse the backslash. + ptr += 1 + has_quoting = True + item_ptr[0] = ptr[0] + item_ptr += 1 + strip_spaces = False + item_end = item_ptr + + elif in_quotes: + # Consume the string until we see the closing quote. + item_ptr[0] = ptr[0] + item_ptr += 1 + + elif ptr[0] == '{': + # Nesting level increase. + nest_level += 1 + + indexes[nest_level - 1] = 0 + new_stride = cpython.PyList_New(dims[nest_level - 1]) + strides[nest_level - 1] = \ + (new_stride) + + if nest_level > 1: + cpython.Py_INCREF(new_stride) + cpython.PyList_SET_ITEM( + strides[nest_level - 2], + indexes[nest_level - 2], + new_stride) + else: + result = new_stride + + elif ptr[0] == '}': + if item_level == 0: + # Make sure we keep track of which nesting + # level the item belongs to, as the loop + # will continue to consume closing braces + # until the delimiter or the end of input. + item_level = nest_level + + nest_level -= 1 + + if nest_level == 0: + end_of_array = end_of_item = True + + elif ptr[0] == typdelim: + # Array element delimiter, + end_of_item = True + if item_level == 0: + item_level = nest_level + + elif apg_ascii_isspace(ptr[0]): + if not strip_spaces: + item_ptr[0] = ptr[0] + item_ptr += 1 + # Ignore the leading literal whitespace. + + else: + item_ptr[0] = ptr[0] + item_ptr += 1 + strip_spaces = False + item_end = item_ptr + + ptr += 1 + + # end while not end_of_item + + if item_end == item_start: + # Empty array + continue + + item_end[0] = '\0' + + if not has_quoting and apg_strcasecmp(item_start, APG_NULL) == 0: + # NULL element. + item = None + else: + # XXX: find a way to avoid the redundant encode/decode + # cycle here. + item_text = cpythonx.PyUnicode_FromKindAndData( + cpythonx.PyUnicode_4BYTE_KIND, + item_start, + item_end - item_start) + + # Prepare the element buffer and call the text decoder + # for the element type. + pgproto.as_pg_string_and_size( + settings, item_text, &pg_item_str, &pg_item_len) + frb_init(&item_buf, pg_item_str, pg_item_len) + item = decoder(settings, &item_buf, decoder_arg) + + # Place the decoded element in the array. 
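+        # (PyList_SET_ITEM steals a reference, hence the explicit INCREF)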
+ cpython.Py_INCREF(item) + cpython.PyList_SET_ITEM( + strides[item_level - 1], + indexes[item_level - 1], + item) + + if nest_level > 0: + indexes[nest_level - 1] += 1 + + return result + + +cdef enum _ArrayParseState: + APS_START = 1 + APS_STRIDE_STARTED = 2 + APS_STRIDE_DONE = 3 + APS_STRIDE_DELIMITED = 4 + APS_ELEM_STARTED = 5 + APS_ELEM_DELIMITED = 6 + + +cdef _UnexpectedCharacter(const Py_UCS4 *array_text, const Py_UCS4 *ptr): + return ValueError('unexpected character {!r} at position {}'.format( + cpython.PyUnicode_FromOrdinal(ptr[0]), ptr - array_text + 1)) + + +cdef _infer_array_dims(const Py_UCS4 *array_text, + Py_UCS4 typdelim, + int32_t *dims, + int32_t *ndims): + cdef: + const Py_UCS4 *ptr = array_text + int i + int nest_level = 0 + bint end_of_array = False + bint end_of_item = False + bint in_quotes = False + bint array_is_empty = True + int stride_len[ARRAY_MAXDIM] + int prev_stride_len[ARRAY_MAXDIM] + _ArrayParseState parse_state = APS_START + + for i in range(ARRAY_MAXDIM): + dims[i] = prev_stride_len[i] = 0 + stride_len[i] = 1 + + while not end_of_array: + end_of_item = False + + while not end_of_item: + if ptr[0] == '\0': + raise ValueError('unexpected end of string') + + elif ptr[0] == '"': + if (parse_state not in (APS_STRIDE_STARTED, + APS_ELEM_DELIMITED) and + not (parse_state == APS_ELEM_STARTED and in_quotes)): + raise _UnexpectedCharacter(array_text, ptr) + + in_quotes = not in_quotes + if in_quotes: + parse_state = APS_ELEM_STARTED + array_is_empty = False + + elif ptr[0] == '\\': + if parse_state not in (APS_STRIDE_STARTED, + APS_ELEM_STARTED, + APS_ELEM_DELIMITED): + raise _UnexpectedCharacter(array_text, ptr) + + parse_state = APS_ELEM_STARTED + array_is_empty = False + + if ptr[1] != '\0': + ptr += 1 + else: + raise ValueError('unexpected end of string') + + elif in_quotes: + # Ignore everything inside the quotes. 
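+                # (the matching closing quote is handled by the '"' branch
+                # above, which flips in_quotes back off)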
+ pass + + elif ptr[0] == '{': + if parse_state not in (APS_START, + APS_STRIDE_STARTED, + APS_STRIDE_DELIMITED): + raise _UnexpectedCharacter(array_text, ptr) + + parse_state = APS_STRIDE_STARTED + if nest_level >= ARRAY_MAXDIM: + raise ValueError( + 'number of array dimensions ({}) exceed the ' + 'maximum expected ({})'.format( + nest_level, ARRAY_MAXDIM)) + + dims[nest_level] = 0 + nest_level += 1 + if ndims[0] < nest_level: + ndims[0] = nest_level + + elif ptr[0] == '}': + if (parse_state not in (APS_ELEM_STARTED, APS_STRIDE_DONE) and + not (nest_level == 1 and + parse_state == APS_STRIDE_STARTED)): + raise _UnexpectedCharacter(array_text, ptr) + + parse_state = APS_STRIDE_DONE + + if nest_level == 0: + raise _UnexpectedCharacter(array_text, ptr) + + nest_level -= 1 + + if (prev_stride_len[nest_level] != 0 and + stride_len[nest_level] != prev_stride_len[nest_level]): + raise ValueError( + 'inconsistent sub-array dimensions' + ' at position {}'.format( + ptr - array_text + 1)) + + prev_stride_len[nest_level] = stride_len[nest_level] + stride_len[nest_level] = 1 + if nest_level == 0: + end_of_array = end_of_item = True + else: + dims[nest_level - 1] += 1 + + elif ptr[0] == typdelim: + if parse_state not in (APS_ELEM_STARTED, APS_STRIDE_DONE): + raise _UnexpectedCharacter(array_text, ptr) + + if parse_state == APS_STRIDE_DONE: + parse_state = APS_STRIDE_DELIMITED + else: + parse_state = APS_ELEM_DELIMITED + end_of_item = True + stride_len[nest_level - 1] += 1 + + elif not apg_ascii_isspace(ptr[0]): + if parse_state not in (APS_STRIDE_STARTED, + APS_ELEM_STARTED, + APS_ELEM_DELIMITED): + raise _UnexpectedCharacter(array_text, ptr) + + parse_state = APS_ELEM_STARTED + array_is_empty = False + + if not end_of_item: + ptr += 1 + + if not array_is_empty: + dims[ndims[0] - 1] += 1 + + ptr += 1 + + # only whitespace is allowed after the closing brace + while ptr[0] != '\0': + if not apg_ascii_isspace(ptr[0]): + raise _UnexpectedCharacter(array_text, ptr) + + ptr += 1 + + if array_is_empty: + ndims[0] = 0 + + +cdef uint4_encode_ex(ConnectionSettings settings, WriteBuffer buf, object obj, + const void *arg): + return pgproto.uint4_encode(settings, buf, obj) + + +cdef uint4_decode_ex(ConnectionSettings settings, FRBuffer *buf, + const void *arg): + return pgproto.uint4_decode(settings, buf) + + +cdef arrayoid_encode(ConnectionSettings settings, WriteBuffer buf, items): + array_encode(settings, buf, items, OIDOID, + &uint4_encode_ex, NULL) + + +cdef arrayoid_decode(ConnectionSettings settings, FRBuffer *buf): + return array_decode(settings, buf, &uint4_decode_ex, NULL) + + +cdef text_encode_ex(ConnectionSettings settings, WriteBuffer buf, object obj, + const void *arg): + return pgproto.text_encode(settings, buf, obj) + + +cdef text_decode_ex(ConnectionSettings settings, FRBuffer *buf, + const void *arg): + return pgproto.text_decode(settings, buf) + + +cdef arraytext_encode(ConnectionSettings settings, WriteBuffer buf, items): + array_encode(settings, buf, items, TEXTOID, + &text_encode_ex, NULL) + + +cdef arraytext_decode(ConnectionSettings settings, FRBuffer *buf): + return array_decode(settings, buf, &text_decode_ex, NULL) + + +cdef init_array_codecs(): + # oid[] and text[] are registered as core codecs + # to make type introspection query work + # + register_core_codec(_OIDOID, + &arrayoid_encode, + &arrayoid_decode, + PG_FORMAT_BINARY) + + register_core_codec(_TEXTOID, + &arraytext_encode, + &arraytext_decode, + PG_FORMAT_BINARY) + +init_array_codecs() diff --git 
a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/base.pxd b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/base.pxd new file mode 100644 index 0000000..16928b8 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/base.pxd @@ -0,0 +1,184 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +ctypedef object (*encode_func)(ConnectionSettings settings, + WriteBuffer buf, + object obj) + +ctypedef object (*decode_func)(ConnectionSettings settings, + FRBuffer *buf) + +ctypedef object (*codec_encode_func)(Codec codec, + ConnectionSettings settings, + WriteBuffer buf, + object obj) + +ctypedef object (*codec_decode_func)(Codec codec, + ConnectionSettings settings, + FRBuffer *buf) + + +cdef enum CodecType: + CODEC_UNDEFINED = 0 + CODEC_C = 1 + CODEC_PY = 2 + CODEC_ARRAY = 3 + CODEC_COMPOSITE = 4 + CODEC_RANGE = 5 + CODEC_MULTIRANGE = 6 + + +cdef enum ServerDataFormat: + PG_FORMAT_ANY = -1 + PG_FORMAT_TEXT = 0 + PG_FORMAT_BINARY = 1 + + +cdef enum ClientExchangeFormat: + PG_XFORMAT_OBJECT = 1 + PG_XFORMAT_TUPLE = 2 + + +cdef class Codec: + cdef: + uint32_t oid + + str name + str schema + str kind + + CodecType type + ServerDataFormat format + ClientExchangeFormat xformat + + encode_func c_encoder + decode_func c_decoder + + object py_encoder + object py_decoder + + # arrays + Codec element_codec + Py_UCS4 element_delimiter + + # composite types + tuple element_type_oids + object element_names + object record_desc + list element_codecs + + # Pointers to actual encoder/decoder functions for this codec + codec_encode_func encoder + codec_decode_func decoder + + cdef init(self, str name, str schema, str kind, + CodecType type, ServerDataFormat format, + ClientExchangeFormat xformat, + encode_func c_encoder, decode_func c_decoder, + object py_encoder, object py_decoder, + Codec element_codec, tuple element_type_oids, + object element_names, list element_codecs, + Py_UCS4 element_delimiter) + + cdef encode_scalar(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + + cdef encode_array(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + + cdef encode_array_text(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + + cdef encode_range(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + + cdef encode_multirange(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + + cdef encode_composite(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + + cdef encode_in_python(self, ConnectionSettings settings, WriteBuffer buf, + object obj) + + cdef decode_scalar(self, ConnectionSettings settings, FRBuffer *buf) + + cdef decode_array(self, ConnectionSettings settings, FRBuffer *buf) + + cdef decode_array_text(self, ConnectionSettings settings, FRBuffer *buf) + + cdef decode_range(self, ConnectionSettings settings, FRBuffer *buf) + + cdef decode_multirange(self, ConnectionSettings settings, FRBuffer *buf) + + cdef decode_composite(self, ConnectionSettings settings, FRBuffer *buf) + + cdef decode_in_python(self, ConnectionSettings settings, FRBuffer *buf) + + cdef inline encode(self, + ConnectionSettings settings, + WriteBuffer buf, + object obj) + + cdef inline decode(self, ConnectionSettings settings, FRBuffer *buf) + + cdef has_encoder(self) + cdef has_decoder(self) + cdef is_binary(self) + + cdef inline Codec copy(self) + + @staticmethod + cdef Codec 
new_array_codec(uint32_t oid, + str name, + str schema, + Codec element_codec, + Py_UCS4 element_delimiter) + + @staticmethod + cdef Codec new_range_codec(uint32_t oid, + str name, + str schema, + Codec element_codec) + + @staticmethod + cdef Codec new_multirange_codec(uint32_t oid, + str name, + str schema, + Codec element_codec) + + @staticmethod + cdef Codec new_composite_codec(uint32_t oid, + str name, + str schema, + ServerDataFormat format, + list element_codecs, + tuple element_type_oids, + object element_names) + + @staticmethod + cdef Codec new_python_codec(uint32_t oid, + str name, + str schema, + str kind, + object encoder, + object decoder, + encode_func c_encoder, + decode_func c_decoder, + ServerDataFormat format, + ClientExchangeFormat xformat) + + +cdef class DataCodecConfig: + cdef: + dict _derived_type_codecs + dict _custom_type_codecs + + cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format, + bint ignore_custom_codec=*) + cdef inline Codec get_custom_codec(self, uint32_t oid, + ServerDataFormat format) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/base.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/base.pyx new file mode 100644 index 0000000..273b27a --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/base.pyx @@ -0,0 +1,861 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from collections.abc import Mapping as MappingABC + +import asyncpg +from asyncpg import exceptions + + +cdef void* binary_codec_map[(MAXSUPPORTEDOID + 1) * 2] +cdef void* text_codec_map[(MAXSUPPORTEDOID + 1) * 2] +cdef dict EXTRA_CODECS = {} + + +@cython.final +cdef class Codec: + + def __cinit__(self, uint32_t oid): + self.oid = oid + self.type = CODEC_UNDEFINED + + cdef init(self, str name, str schema, str kind, + CodecType type, ServerDataFormat format, + ClientExchangeFormat xformat, + encode_func c_encoder, decode_func c_decoder, + object py_encoder, object py_decoder, + Codec element_codec, tuple element_type_oids, + object element_names, list element_codecs, + Py_UCS4 element_delimiter): + + self.name = name + self.schema = schema + self.kind = kind + self.type = type + self.format = format + self.xformat = xformat + self.c_encoder = c_encoder + self.c_decoder = c_decoder + self.py_encoder = py_encoder + self.py_decoder = py_decoder + self.element_codec = element_codec + self.element_type_oids = element_type_oids + self.element_codecs = element_codecs + self.element_delimiter = element_delimiter + self.element_names = element_names + + if element_names is not None: + self.record_desc = record.ApgRecordDesc_New( + element_names, tuple(element_names)) + else: + self.record_desc = None + + if type == CODEC_C: + self.encoder = &self.encode_scalar + self.decoder = &self.decode_scalar + elif type == CODEC_ARRAY: + if format == PG_FORMAT_BINARY: + self.encoder = &self.encode_array + self.decoder = &self.decode_array + else: + self.encoder = &self.encode_array_text + self.decoder = &self.decode_array_text + elif type == CODEC_RANGE: + if format != PG_FORMAT_BINARY: + raise exceptions.UnsupportedClientFeatureError( + 'cannot decode type "{}"."{}": text encoding of ' + 'range types is not supported'.format(schema, name)) + self.encoder = &self.encode_range + self.decoder = &self.decode_range + elif type == CODEC_MULTIRANGE: + if format != PG_FORMAT_BINARY: + raise 
exceptions.UnsupportedClientFeatureError( + 'cannot decode type "{}"."{}": text encoding of ' + 'range types is not supported'.format(schema, name)) + self.encoder = &self.encode_multirange + self.decoder = &self.decode_multirange + elif type == CODEC_COMPOSITE: + if format != PG_FORMAT_BINARY: + raise exceptions.UnsupportedClientFeatureError( + 'cannot decode type "{}"."{}": text encoding of ' + 'composite types is not supported'.format(schema, name)) + self.encoder = &self.encode_composite + self.decoder = &self.decode_composite + elif type == CODEC_PY: + self.encoder = &self.encode_in_python + self.decoder = &self.decode_in_python + else: + raise exceptions.InternalClientError( + 'unexpected codec type: {}'.format(type)) + + cdef Codec copy(self): + cdef Codec codec + + codec = Codec(self.oid) + codec.init(self.name, self.schema, self.kind, + self.type, self.format, self.xformat, + self.c_encoder, self.c_decoder, + self.py_encoder, self.py_decoder, + self.element_codec, + self.element_type_oids, self.element_names, + self.element_codecs, self.element_delimiter) + + return codec + + cdef encode_scalar(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + self.c_encoder(settings, buf, obj) + + cdef encode_array(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + array_encode(settings, buf, obj, self.element_codec.oid, + codec_encode_func_ex, + (self.element_codec)) + + cdef encode_array_text(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + return textarray_encode(settings, buf, obj, + codec_encode_func_ex, + (self.element_codec), + self.element_delimiter) + + cdef encode_range(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + range_encode(settings, buf, obj, self.element_codec.oid, + codec_encode_func_ex, + (self.element_codec)) + + cdef encode_multirange(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + multirange_encode(settings, buf, obj, self.element_codec.oid, + codec_encode_func_ex, + (self.element_codec)) + + cdef encode_composite(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + cdef: + WriteBuffer elem_data + int i + list elem_codecs = self.element_codecs + ssize_t count + ssize_t composite_size + tuple rec + + if isinstance(obj, MappingABC): + # Input is dict-like, form a tuple + composite_size = len(self.element_type_oids) + rec = cpython.PyTuple_New(composite_size) + + for i in range(composite_size): + cpython.Py_INCREF(None) + cpython.PyTuple_SET_ITEM(rec, i, None) + + for field in obj: + try: + i = self.element_names[field] + except KeyError: + raise ValueError( + '{!r} is not a valid element of composite ' + 'type {}'.format(field, self.name)) from None + + item = obj[field] + cpython.Py_INCREF(item) + cpython.PyTuple_SET_ITEM(rec, i, item) + + obj = rec + + count = len(obj) + if count > _MAXINT32: + raise ValueError('too many elements in composite type record') + + elem_data = WriteBuffer.new() + i = 0 + for item in obj: + elem_data.write_int32(self.element_type_oids[i]) + if item is None: + elem_data.write_int32(-1) + else: + (elem_codecs[i]).encode(settings, elem_data, item) + i += 1 + + record_encode_frame(settings, buf, elem_data, count) + + cdef encode_in_python(self, ConnectionSettings settings, WriteBuffer buf, + object obj): + data = self.py_encoder(obj) + if self.xformat == PG_XFORMAT_OBJECT: + if self.format == PG_FORMAT_BINARY: + pgproto.bytea_encode(settings, buf, data) + elif self.format == PG_FORMAT_TEXT: + pgproto.text_encode(settings, buf, data) + 
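# The PG_XFORMAT_OBJECT path above is what a plain
# Connection.set_type_codec() registration exercises: the Python encoder
# produces text (or bytea) and a core codec puts it on the wire. For
# example, the classic registration for json rides exactly this text path:
#
#     import json
#     await conn.set_type_codec(
#         'json', schema='pg_catalog',
#         encoder=json.dumps, decoder=json.loads,
#         format='text')
#
# PG_XFORMAT_TUPLE below corresponds to set_type_codec(..., format='tuple'),
# where the Python codec exchanges the type's low-level tuple form and the
# core C codec handles the binary wire format.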
+            else:
+                raise exceptions.InternalClientError(
+                    'unexpected data format: {}'.format(self.format))
+        elif self.xformat == PG_XFORMAT_TUPLE:
+            self.c_encoder(settings, buf, data)
+        else:
+            raise exceptions.InternalClientError(
+                'unexpected exchange format: {}'.format(self.xformat))
+
+    cdef encode(self, ConnectionSettings settings, WriteBuffer buf,
+                object obj):
+        return self.encoder(self, settings, buf, obj)
+
+    cdef decode_scalar(self, ConnectionSettings settings, FRBuffer *buf):
+        return self.c_decoder(settings, buf)
+
+    cdef decode_array(self, ConnectionSettings settings, FRBuffer *buf):
+        return array_decode(settings, buf, codec_decode_func_ex,
+                            <void*>(<cpython.PyObject>self.element_codec))
+
+    cdef decode_array_text(self, ConnectionSettings settings,
+                           FRBuffer *buf):
+        return textarray_decode(settings, buf, codec_decode_func_ex,
+                                <void*>(<cpython.PyObject>self.element_codec),
+                                self.element_delimiter)
+
+    cdef decode_range(self, ConnectionSettings settings, FRBuffer *buf):
+        return range_decode(settings, buf, codec_decode_func_ex,
+                            <void*>(<cpython.PyObject>self.element_codec))
+
+    cdef decode_multirange(self, ConnectionSettings settings, FRBuffer *buf):
+        return multirange_decode(settings, buf, codec_decode_func_ex,
+                                 <void*>(<cpython.PyObject>self.element_codec))
+
+    cdef decode_composite(self, ConnectionSettings settings,
+                          FRBuffer *buf):
+        cdef:
+            object result
+            ssize_t elem_count
+            ssize_t i
+            int32_t elem_len
+            uint32_t elem_typ
+            uint32_t received_elem_typ
+            Codec elem_codec
+            FRBuffer elem_buf
+
+        elem_count = hton.unpack_int32(frb_read(buf, 4))
+        if elem_count != len(self.element_type_oids):
+            raise exceptions.OutdatedSchemaCacheError(
+                'unexpected number of attributes of composite type: '
+                '{}, expected {}'
+                .format(
+                    elem_count,
+                    len(self.element_type_oids),
+                ),
+                schema=self.schema,
+                data_type=self.name,
+            )
+        result = record.ApgRecord_New(asyncpg.Record, self.record_desc, elem_count)
+        for i in range(elem_count):
+            elem_typ = self.element_type_oids[i]
+            received_elem_typ = hton.unpack_int32(frb_read(buf, 4))
+
+            if received_elem_typ != elem_typ:
+                raise exceptions.OutdatedSchemaCacheError(
+                    'unexpected data type of composite type attribute {}: '
+                    '{!r}, expected {!r}'
+                    .format(
+                        i,
+                        BUILTIN_TYPE_OID_MAP.get(
+                            received_elem_typ, received_elem_typ),
+                        BUILTIN_TYPE_OID_MAP.get(
+                            elem_typ, elem_typ)
+                    ),
+                    schema=self.schema,
+                    data_type=self.name,
+                    position=i,
+                )
+
+            elem_len = hton.unpack_int32(frb_read(buf, 4))
+            if elem_len == -1:
+                elem = None
+            else:
+                elem_codec = self.element_codecs[i]
+                elem = elem_codec.decode(
+                    settings, frb_slice_from(&elem_buf, buf, elem_len))
+
+            cpython.Py_INCREF(elem)
+            record.ApgRecord_SET_ITEM(result, i, elem)
+
+        return result
+
+    cdef decode_in_python(self, ConnectionSettings settings,
+                          FRBuffer *buf):
+        if self.xformat == PG_XFORMAT_OBJECT:
+            if self.format == PG_FORMAT_BINARY:
+                data = pgproto.bytea_decode(settings, buf)
+            elif self.format == PG_FORMAT_TEXT:
+                data = pgproto.text_decode(settings, buf)
+            else:
+                raise exceptions.InternalClientError(
+                    'unexpected data format: {}'.format(self.format))
+        elif self.xformat == PG_XFORMAT_TUPLE:
+            data = self.c_decoder(settings, buf)
+        else:
+            raise exceptions.InternalClientError(
+                'unexpected exchange format: {}'.format(self.xformat))
+
+        return self.py_decoder(data)
+
+    cdef inline decode(self, ConnectionSettings settings, FRBuffer *buf):
+        return self.decoder(self, settings, buf)
+
+    cdef inline has_encoder(self):
+        cdef Codec elem_codec
+
+        if self.c_encoder is not NULL or self.py_encoder is not None:
+            return True
+
+        elif (
+            self.type == CODEC_ARRAY
+            or self.type == CODEC_RANGE
+            or self.type == CODEC_MULTIRANGE
+        ):
+            return self.element_codec.has_encoder()
+
+        elif self.type == CODEC_COMPOSITE:
+            for elem_codec in self.element_codecs:
+                if not elem_codec.has_encoder():
+                    return False
+            return True
+
+        else:
+            return False
+
+    cdef has_decoder(self):
+        cdef Codec elem_codec
+
+        if self.c_decoder is not NULL or self.py_decoder is not None:
+            return True
+
+        elif (
+            self.type == CODEC_ARRAY
+            or self.type == CODEC_RANGE
+            or self.type == CODEC_MULTIRANGE
+        ):
+            return self.element_codec.has_decoder()
+
+        elif self.type == CODEC_COMPOSITE:
+            for elem_codec in self.element_codecs:
+                if not elem_codec.has_decoder():
+                    return False
+            return True
+
+        else:
+            return False
+
+    cdef is_binary(self):
+        return self.format == PG_FORMAT_BINARY
+
+    def __repr__(self):
+        return '<Codec oid={} elem_oid={} core={}>'.format(
+            self.oid,
+            'NA' if self.element_codec is None else self.element_codec.oid,
+            has_core_codec(self.oid))
+
+    @staticmethod
+    cdef Codec new_array_codec(uint32_t oid,
+                               str name,
+                               str schema,
+                               Codec element_codec,
+                               Py_UCS4 element_delimiter):
+        cdef Codec codec
+        codec = Codec(oid)
+        codec.init(name, schema, 'array', CODEC_ARRAY, element_codec.format,
+                   PG_XFORMAT_OBJECT, NULL, NULL, None, None, element_codec,
+                   None, None, None, element_delimiter)
+        return codec
+
+    @staticmethod
+    cdef Codec new_range_codec(uint32_t oid,
+                               str name,
+                               str schema,
+                               Codec element_codec):
+        cdef Codec codec
+        codec = Codec(oid)
+        codec.init(name, schema, 'range', CODEC_RANGE, element_codec.format,
+                   PG_XFORMAT_OBJECT, NULL, NULL, None, None, element_codec,
+                   None, None, None, 0)
+        return codec
+
+    @staticmethod
+    cdef Codec new_multirange_codec(uint32_t oid,
+                                    str name,
+                                    str schema,
+                                    Codec element_codec):
+        cdef Codec codec
+        codec = Codec(oid)
+        codec.init(name, schema, 'multirange', CODEC_MULTIRANGE,
+                   element_codec.format, PG_XFORMAT_OBJECT, NULL, NULL,
+                   None, None, element_codec, None, None, None, 0)
+        return codec
+
+    @staticmethod
+    cdef Codec new_composite_codec(uint32_t oid,
+                                   str name,
+                                   str schema,
+                                   ServerDataFormat format,
+                                   list element_codecs,
+                                   tuple element_type_oids,
+                                   object element_names):
+        cdef Codec codec
+        codec = Codec(oid)
+        codec.init(name, schema, 'composite', CODEC_COMPOSITE,
+                   format, PG_XFORMAT_OBJECT, NULL, NULL, None, None, None,
+                   element_type_oids, element_names, element_codecs, 0)
+        return codec
+
+    @staticmethod
+    cdef Codec new_python_codec(uint32_t oid,
+                                str name,
+                                str schema,
+                                str kind,
+                                object encoder,
+                                object decoder,
+                                encode_func c_encoder,
+                                decode_func c_decoder,
+                                ServerDataFormat format,
+                                ClientExchangeFormat xformat):
+        cdef Codec codec
+        codec = Codec(oid)
+        codec.init(name, schema, kind, CODEC_PY, format, xformat,
+                   c_encoder, c_decoder, encoder, decoder,
+                   None, None, None, None, 0)
+        return codec
+
+
+# Encode callback for arrays
+cdef codec_encode_func_ex(ConnectionSettings settings, WriteBuffer buf,
+                          object obj, const void *arg):
+    return (<Codec>arg).encode(settings, buf, obj)
+
+
+# Decode callback for arrays
+cdef codec_decode_func_ex(ConnectionSettings settings, FRBuffer *buf,
+                          const void *arg):
+    return (<Codec>arg).decode(settings, buf)
+
+
+cdef uint32_t pylong_as_oid(val) except? 
0xFFFFFFFFl: + cdef: + int64_t oid = 0 + bint overflow = False + + try: + oid = cpython.PyLong_AsLongLong(val) + except OverflowError: + overflow = True + + if overflow or (oid < 0 or oid > UINT32_MAX): + raise OverflowError('OID value too large: {!r}'.format(val)) + + return val + + +cdef class DataCodecConfig: + def __init__(self, cache_key): + # Codec instance cache for derived types: + # composites, arrays, ranges, domains and their combinations. + self._derived_type_codecs = {} + # Codec instances set up by the user for the connection. + self._custom_type_codecs = {} + + def add_types(self, types): + cdef: + Codec elem_codec + list comp_elem_codecs + ServerDataFormat format + ServerDataFormat elem_format + bint has_text_elements + Py_UCS4 elem_delim + + for ti in types: + oid = ti['oid'] + + if self.get_codec(oid, PG_FORMAT_ANY) is not None: + continue + + name = ti['name'] + schema = ti['ns'] + array_element_oid = ti['elemtype'] + range_subtype_oid = ti['range_subtype'] + if ti['attrtypoids']: + comp_type_attrs = tuple(ti['attrtypoids']) + else: + comp_type_attrs = None + base_type = ti['basetype'] + + if array_element_oid: + # Array type (note, there is no separate 'kind' for arrays) + + # Canonicalize type name to "elemtype[]" + if name.startswith('_'): + name = name[1:] + name = '{}[]'.format(name) + + elem_codec = self.get_codec(array_element_oid, PG_FORMAT_ANY) + if elem_codec is None: + elem_codec = self.declare_fallback_codec( + array_element_oid, ti['elemtype_name'], schema) + + elem_delim = ti['elemdelim'][0] + + self._derived_type_codecs[oid, elem_codec.format] = \ + Codec.new_array_codec( + oid, name, schema, elem_codec, elem_delim) + + elif ti['kind'] == b'c': + # Composite type + + if not comp_type_attrs: + raise exceptions.InternalClientError( + f'type record missing field types for composite {oid}') + + comp_elem_codecs = [] + has_text_elements = False + + for typoid in comp_type_attrs: + elem_codec = self.get_codec(typoid, PG_FORMAT_ANY) + if elem_codec is None: + raise exceptions.InternalClientError( + f'no codec for composite attribute type {typoid}') + if elem_codec.format is PG_FORMAT_TEXT: + has_text_elements = True + comp_elem_codecs.append(elem_codec) + + element_names = collections.OrderedDict() + for i, attrname in enumerate(ti['attrnames']): + element_names[attrname] = i + + # If at least one element is text-encoded, we must + # encode the whole composite as text. 
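# Concretely: given a hypothetical CREATE TYPE pair AS (a int4, b t),
# the composite codec can be binary only if the codecs for *both* int4
# and t are binary; a single text-only attribute codec demotes the whole
# composite to the text format chosen below.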
+ if has_text_elements: + elem_format = PG_FORMAT_TEXT + else: + elem_format = PG_FORMAT_BINARY + + self._derived_type_codecs[oid, elem_format] = \ + Codec.new_composite_codec( + oid, name, schema, elem_format, comp_elem_codecs, + comp_type_attrs, element_names) + + elif ti['kind'] == b'd': + # Domain type + + if not base_type: + raise exceptions.InternalClientError( + f'type record missing base type for domain {oid}') + + elem_codec = self.get_codec(base_type, PG_FORMAT_ANY) + if elem_codec is None: + elem_codec = self.declare_fallback_codec( + base_type, ti['basetype_name'], schema) + + self._derived_type_codecs[oid, elem_codec.format] = elem_codec + + elif ti['kind'] == b'r': + # Range type + + if not range_subtype_oid: + raise exceptions.InternalClientError( + f'type record missing base type for range {oid}') + + elem_codec = self.get_codec(range_subtype_oid, PG_FORMAT_ANY) + if elem_codec is None: + elem_codec = self.declare_fallback_codec( + range_subtype_oid, ti['range_subtype_name'], schema) + + self._derived_type_codecs[oid, elem_codec.format] = \ + Codec.new_range_codec(oid, name, schema, elem_codec) + + elif ti['kind'] == b'm': + # Multirange type + + if not range_subtype_oid: + raise exceptions.InternalClientError( + f'type record missing base type for multirange {oid}') + + elem_codec = self.get_codec(range_subtype_oid, PG_FORMAT_ANY) + if elem_codec is None: + elem_codec = self.declare_fallback_codec( + range_subtype_oid, ti['range_subtype_name'], schema) + + self._derived_type_codecs[oid, elem_codec.format] = \ + Codec.new_multirange_codec(oid, name, schema, elem_codec) + + elif ti['kind'] == b'e': + # Enum types are essentially text + self._set_builtin_type_codec(oid, name, schema, 'scalar', + TEXTOID, PG_FORMAT_ANY) + else: + self.declare_fallback_codec(oid, name, schema) + + def add_python_codec(self, typeoid, typename, typeschema, typekind, + encoder, decoder, format, xformat): + cdef: + Codec core_codec + encode_func c_encoder = NULL + decode_func c_decoder = NULL + uint32_t oid = pylong_as_oid(typeoid) + bint codec_set = False + + # Clear all previous overrides (this also clears type cache). 
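# As a consequence, registering a codec for the same type twice simply
# replaces the earlier registration and invalidates cached derived
# codecs. A sketch using the public API (the type name 'tag' is
# hypothetical):
#
#     await conn.set_type_codec('tag', schema='public',
#                               encoder=str, decoder=str, format='text')
#     # ...later; this one wins, the previous codec is discarded:
#     await conn.set_type_codec('tag', schema='public',
#                               encoder=str.lower, decoder=str.upper,
#                               format='text')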
+ self.remove_python_codec(typeoid, typename, typeschema) + + if format == PG_FORMAT_ANY: + formats = (PG_FORMAT_TEXT, PG_FORMAT_BINARY) + else: + formats = (format,) + + for fmt in formats: + if xformat == PG_XFORMAT_TUPLE: + core_codec = get_core_codec(oid, fmt, xformat) + if core_codec is None: + continue + c_encoder = core_codec.c_encoder + c_decoder = core_codec.c_decoder + + self._custom_type_codecs[typeoid, fmt] = \ + Codec.new_python_codec(oid, typename, typeschema, typekind, + encoder, decoder, c_encoder, c_decoder, + fmt, xformat) + codec_set = True + + if not codec_set: + raise exceptions.InterfaceError( + "{} type does not support the 'tuple' exchange format".format( + typename)) + + def remove_python_codec(self, typeoid, typename, typeschema): + for fmt in (PG_FORMAT_BINARY, PG_FORMAT_TEXT): + self._custom_type_codecs.pop((typeoid, fmt), None) + self.clear_type_cache() + + def _set_builtin_type_codec(self, typeoid, typename, typeschema, typekind, + alias_to, format=PG_FORMAT_ANY): + cdef: + Codec codec + Codec target_codec + uint32_t oid = pylong_as_oid(typeoid) + uint32_t alias_oid = 0 + bint codec_set = False + + if format == PG_FORMAT_ANY: + formats = (PG_FORMAT_BINARY, PG_FORMAT_TEXT) + else: + formats = (format,) + + if isinstance(alias_to, int): + alias_oid = pylong_as_oid(alias_to) + else: + alias_oid = BUILTIN_TYPE_NAME_MAP.get(alias_to, 0) + + for format in formats: + if alias_oid != 0: + target_codec = self.get_codec(alias_oid, format) + else: + target_codec = get_extra_codec(alias_to, format) + + if target_codec is None: + continue + + codec = target_codec.copy() + codec.oid = typeoid + codec.name = typename + codec.schema = typeschema + codec.kind = typekind + + self._custom_type_codecs[typeoid, format] = codec + codec_set = True + + if not codec_set: + if format == PG_FORMAT_BINARY: + codec_str = 'binary' + elif format == PG_FORMAT_TEXT: + codec_str = 'text' + else: + codec_str = 'text or binary' + + raise exceptions.InterfaceError( + f'cannot alias {typename} to {alias_to}: ' + f'there is no {codec_str} codec for {alias_to}') + + def set_builtin_type_codec(self, typeoid, typename, typeschema, typekind, + alias_to, format=PG_FORMAT_ANY): + self._set_builtin_type_codec(typeoid, typename, typeschema, typekind, + alias_to, format) + self.clear_type_cache() + + def clear_type_cache(self): + self._derived_type_codecs.clear() + + def declare_fallback_codec(self, uint32_t oid, str name, str schema): + cdef Codec codec + + if oid <= MAXBUILTINOID: + # This is a BKI type, for which asyncpg has no + # defined codec. This should only happen for newly + # added builtin types, for which this version of + # asyncpg is lacking support. + # + raise exceptions.UnsupportedClientFeatureError( + f'unhandled standard data type {name!r} (OID {oid})') + else: + # This is a non-BKI type, and as such, has no + # stable OID, so no possibility of a builtin codec. + # In this case, fallback to text format. Applications + # can avoid this by specifying a codec for this type + # using Connection.set_type_codec(). 
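# For instance, a user-defined scalar type that would otherwise hit this
# text fallback can be given an explicit codec up front (the type name
# 'semver' and the helper parse_semver below are hypothetical):
#
#     await conn.set_type_codec(
#         'semver', schema='public',
#         encoder=str, decoder=parse_semver,
#         format='text')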
+            #
+            self._set_builtin_type_codec(oid, name, schema, 'scalar',
+                                         TEXTOID, PG_FORMAT_TEXT)
+
+            codec = self.get_codec(oid, PG_FORMAT_TEXT)
+
+        return codec
+
+    cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format,
+                                bint ignore_custom_codec=False):
+        cdef Codec codec
+
+        if format == PG_FORMAT_ANY:
+            codec = self.get_codec(
+                oid, PG_FORMAT_BINARY, ignore_custom_codec)
+            if codec is None:
+                codec = self.get_codec(
+                    oid, PG_FORMAT_TEXT, ignore_custom_codec)
+            return codec
+        else:
+            if not ignore_custom_codec:
+                codec = self.get_custom_codec(oid, PG_FORMAT_ANY)
+                if codec is not None:
+                    if codec.format != format:
+                        # The codec for this OID has been overridden by
+                        # set_{builtin}_type_codec with a different format.
+                        # We must respect that and not return a core codec.
+                        return None
+                    else:
+                        return codec
+
+            codec = get_core_codec(oid, format)
+            if codec is not None:
+                return codec
+            else:
+                try:
+                    return self._derived_type_codecs[oid, format]
+                except KeyError:
+                    return None
+
+    cdef inline Codec get_custom_codec(
+        self,
+        uint32_t oid,
+        ServerDataFormat format
+    ):
+        cdef Codec codec
+
+        if format == PG_FORMAT_ANY:
+            codec = self.get_custom_codec(oid, PG_FORMAT_BINARY)
+            if codec is None:
+                codec = self.get_custom_codec(oid, PG_FORMAT_TEXT)
+        else:
+            codec = self._custom_type_codecs.get((oid, format))
+
+        return codec
+
+
+cdef inline Codec get_core_codec(
+        uint32_t oid, ServerDataFormat format,
+        ClientExchangeFormat xformat=PG_XFORMAT_OBJECT):
+    cdef:
+        void *ptr = NULL
+
+    if oid > MAXSUPPORTEDOID:
+        return None
+    if format == PG_FORMAT_BINARY:
+        ptr = binary_codec_map[oid * xformat]
+    elif format == PG_FORMAT_TEXT:
+        ptr = text_codec_map[oid * xformat]
+
+    if ptr is NULL:
+        return None
+    else:
+        return <Codec>ptr
+
+
+cdef inline Codec get_any_core_codec(
+        uint32_t oid, ServerDataFormat format,
+        ClientExchangeFormat xformat=PG_XFORMAT_OBJECT):
+    """A version of get_core_codec that accepts PG_FORMAT_ANY."""
+    cdef:
+        Codec codec
+
+    if format == PG_FORMAT_ANY:
+        codec = get_core_codec(oid, PG_FORMAT_BINARY, xformat)
+        if codec is None:
+            codec = get_core_codec(oid, PG_FORMAT_TEXT, xformat)
+    else:
+        codec = get_core_codec(oid, format, xformat)
+
+    return codec
+
+
+cdef inline int has_core_codec(uint32_t oid):
+    return binary_codec_map[oid] != NULL or text_codec_map[oid] != NULL
+
+
+cdef register_core_codec(uint32_t oid,
+                         encode_func encode,
+                         decode_func decode,
+                         ServerDataFormat format,
+                         ClientExchangeFormat xformat=PG_XFORMAT_OBJECT):
+
+    if oid > MAXSUPPORTEDOID:
+        raise exceptions.InternalClientError(
+            'cannot register core codec for OID {}: it is greater '
+            'than MAXSUPPORTEDOID ({})'.format(oid, MAXSUPPORTEDOID))
+
+    cdef:
+        Codec codec
+        str name
+        str kind
+
+    name = BUILTIN_TYPE_OID_MAP[oid]
+    kind = 'array' if oid in ARRAY_TYPES else 'scalar'
+
+    codec = Codec(oid)
+    codec.init(name, 'pg_catalog', kind, CODEC_C, format, xformat,
+               encode, decode, None, None, None, None, None, None, 0)
+    cpython.Py_INCREF(codec)  # immortalize
+
+    if format == PG_FORMAT_BINARY:
+        binary_codec_map[oid * xformat] = <void*>codec
+    elif format == PG_FORMAT_TEXT:
+        text_codec_map[oid * xformat] = <void*>codec
+    else:
+        raise exceptions.InternalClientError(
+            'invalid data format: {}'.format(format))
+
+
+cdef register_extra_codec(str name,
+                          encode_func encode,
+                          decode_func decode,
+                          ServerDataFormat format):
+    cdef:
+        Codec codec
+        str kind
+
+    kind = 'scalar'
+
+    codec = Codec(INVALIDOID)
+    codec.init(name, None, kind, CODEC_C, format, PG_XFORMAT_OBJECT,
+               encode, decode, None, None, 
None, None, None, None, 0) + EXTRA_CODECS[name, format] = codec + + +cdef inline Codec get_extra_codec(str name, ServerDataFormat format): + return EXTRA_CODECS.get((name, format)) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/pgproto.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/pgproto.pyx new file mode 100644 index 0000000..51d650d --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/pgproto.pyx @@ -0,0 +1,484 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef init_bits_codecs(): + register_core_codec(BITOID, + pgproto.bits_encode, + pgproto.bits_decode, + PG_FORMAT_BINARY) + + register_core_codec(VARBITOID, + pgproto.bits_encode, + pgproto.bits_decode, + PG_FORMAT_BINARY) + + +cdef init_bytea_codecs(): + register_core_codec(BYTEAOID, + pgproto.bytea_encode, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + + register_core_codec(CHAROID, + pgproto.bytea_encode, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + + +cdef init_datetime_codecs(): + register_core_codec(DATEOID, + pgproto.date_encode, + pgproto.date_decode, + PG_FORMAT_BINARY) + + register_core_codec(DATEOID, + pgproto.date_encode_tuple, + pgproto.date_decode_tuple, + PG_FORMAT_BINARY, + PG_XFORMAT_TUPLE) + + register_core_codec(TIMEOID, + pgproto.time_encode, + pgproto.time_decode, + PG_FORMAT_BINARY) + + register_core_codec(TIMEOID, + pgproto.time_encode_tuple, + pgproto.time_decode_tuple, + PG_FORMAT_BINARY, + PG_XFORMAT_TUPLE) + + register_core_codec(TIMETZOID, + pgproto.timetz_encode, + pgproto.timetz_decode, + PG_FORMAT_BINARY) + + register_core_codec(TIMETZOID, + pgproto.timetz_encode_tuple, + pgproto.timetz_decode_tuple, + PG_FORMAT_BINARY, + PG_XFORMAT_TUPLE) + + register_core_codec(TIMESTAMPOID, + pgproto.timestamp_encode, + pgproto.timestamp_decode, + PG_FORMAT_BINARY) + + register_core_codec(TIMESTAMPOID, + pgproto.timestamp_encode_tuple, + pgproto.timestamp_decode_tuple, + PG_FORMAT_BINARY, + PG_XFORMAT_TUPLE) + + register_core_codec(TIMESTAMPTZOID, + pgproto.timestamptz_encode, + pgproto.timestamptz_decode, + PG_FORMAT_BINARY) + + register_core_codec(TIMESTAMPTZOID, + pgproto.timestamp_encode_tuple, + pgproto.timestamp_decode_tuple, + PG_FORMAT_BINARY, + PG_XFORMAT_TUPLE) + + register_core_codec(INTERVALOID, + pgproto.interval_encode, + pgproto.interval_decode, + PG_FORMAT_BINARY) + + register_core_codec(INTERVALOID, + pgproto.interval_encode_tuple, + pgproto.interval_decode_tuple, + PG_FORMAT_BINARY, + PG_XFORMAT_TUPLE) + + # For obsolete abstime/reltime/tinterval, we do not bother to + # interpret the value, and simply return and pass it as text. 
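# In practice that means, on servers old enough to still ship these
# obsolete types, such values surface in Python as plain strings; e.g.
# fetchval("SELECT 'epoch'::abstime") would come back as a str, not a
# datetime.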
+ # + register_core_codec(ABSTIMEOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + register_core_codec(RELTIMEOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + register_core_codec(TINTERVALOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + +cdef init_float_codecs(): + register_core_codec(FLOAT4OID, + pgproto.float4_encode, + pgproto.float4_decode, + PG_FORMAT_BINARY) + + register_core_codec(FLOAT8OID, + pgproto.float8_encode, + pgproto.float8_decode, + PG_FORMAT_BINARY) + + +cdef init_geometry_codecs(): + register_core_codec(BOXOID, + pgproto.box_encode, + pgproto.box_decode, + PG_FORMAT_BINARY) + + register_core_codec(LINEOID, + pgproto.line_encode, + pgproto.line_decode, + PG_FORMAT_BINARY) + + register_core_codec(LSEGOID, + pgproto.lseg_encode, + pgproto.lseg_decode, + PG_FORMAT_BINARY) + + register_core_codec(POINTOID, + pgproto.point_encode, + pgproto.point_decode, + PG_FORMAT_BINARY) + + register_core_codec(PATHOID, + pgproto.path_encode, + pgproto.path_decode, + PG_FORMAT_BINARY) + + register_core_codec(POLYGONOID, + pgproto.poly_encode, + pgproto.poly_decode, + PG_FORMAT_BINARY) + + register_core_codec(CIRCLEOID, + pgproto.circle_encode, + pgproto.circle_decode, + PG_FORMAT_BINARY) + + +cdef init_hstore_codecs(): + register_extra_codec('pg_contrib.hstore', + pgproto.hstore_encode, + pgproto.hstore_decode, + PG_FORMAT_BINARY) + + +cdef init_json_codecs(): + register_core_codec(JSONOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_BINARY) + register_core_codec(JSONBOID, + pgproto.jsonb_encode, + pgproto.jsonb_decode, + PG_FORMAT_BINARY) + register_core_codec(JSONPATHOID, + pgproto.jsonpath_encode, + pgproto.jsonpath_decode, + PG_FORMAT_BINARY) + + +cdef init_int_codecs(): + + register_core_codec(BOOLOID, + pgproto.bool_encode, + pgproto.bool_decode, + PG_FORMAT_BINARY) + + register_core_codec(INT2OID, + pgproto.int2_encode, + pgproto.int2_decode, + PG_FORMAT_BINARY) + + register_core_codec(INT4OID, + pgproto.int4_encode, + pgproto.int4_decode, + PG_FORMAT_BINARY) + + register_core_codec(INT8OID, + pgproto.int8_encode, + pgproto.int8_decode, + PG_FORMAT_BINARY) + + +cdef init_pseudo_codecs(): + # Void type is returned by SELECT void_returning_function() + register_core_codec(VOIDOID, + pgproto.void_encode, + pgproto.void_decode, + PG_FORMAT_BINARY) + + # Unknown type, always decoded as text + register_core_codec(UNKNOWNOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + # OID and friends + oid_types = [ + OIDOID, XIDOID, CIDOID + ] + + for oid_type in oid_types: + register_core_codec(oid_type, + pgproto.uint4_encode, + pgproto.uint4_decode, + PG_FORMAT_BINARY) + + # 64-bit OID types + oid8_types = [ + XID8OID, + ] + + for oid_type in oid8_types: + register_core_codec(oid_type, + pgproto.uint8_encode, + pgproto.uint8_decode, + PG_FORMAT_BINARY) + + # reg* types -- these are really system catalog OIDs, but + # allow the catalog object name as an input. We could just + # decode these as OIDs, but handling them as text seems more + # useful. 
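# So a reg* value round-trips as its catalog name rather than as a
# numeric OID, e.g.:
#
#     await conn.fetchval('SELECT $1::regclass', 'pg_class')
#     # -> 'pg_class' (a str, not an int)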
+ # + reg_types = [ + REGPROCOID, REGPROCEDUREOID, REGOPEROID, REGOPERATOROID, + REGCLASSOID, REGTYPEOID, REGCONFIGOID, REGDICTIONARYOID, + REGNAMESPACEOID, REGROLEOID, REFCURSOROID, REGCOLLATIONOID, + ] + + for reg_type in reg_types: + register_core_codec(reg_type, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + # cstring type is used by Postgres' I/O functions + register_core_codec(CSTRINGOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_BINARY) + + # various system pseudotypes with no I/O + no_io_types = [ + ANYOID, TRIGGEROID, EVENT_TRIGGEROID, LANGUAGE_HANDLEROID, + FDW_HANDLEROID, TSM_HANDLEROID, INTERNALOID, OPAQUEOID, + ANYELEMENTOID, ANYNONARRAYOID, ANYCOMPATIBLEOID, + ANYCOMPATIBLEARRAYOID, ANYCOMPATIBLENONARRAYOID, + ANYCOMPATIBLERANGEOID, ANYCOMPATIBLEMULTIRANGEOID, + ANYRANGEOID, ANYMULTIRANGEOID, ANYARRAYOID, + PG_DDL_COMMANDOID, INDEX_AM_HANDLEROID, TABLE_AM_HANDLEROID, + ] + + register_core_codec(ANYENUMOID, + NULL, + pgproto.text_decode, + PG_FORMAT_TEXT) + + for no_io_type in no_io_types: + register_core_codec(no_io_type, + NULL, + NULL, + PG_FORMAT_BINARY) + + # ACL specification string + register_core_codec(ACLITEMOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + # Postgres' serialized expression tree type + register_core_codec(PG_NODE_TREEOID, + NULL, + pgproto.text_decode, + PG_FORMAT_TEXT) + + # pg_lsn type -- a pointer to a location in the XLOG. + register_core_codec(PG_LSNOID, + pgproto.int8_encode, + pgproto.int8_decode, + PG_FORMAT_BINARY) + + register_core_codec(SMGROID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + # pg_dependencies and pg_ndistinct are special types + # used in pg_statistic_ext columns. + register_core_codec(PG_DEPENDENCIESOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + register_core_codec(PG_NDISTINCTOID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + # pg_mcv_list is a special type used in pg_statistic_ext_data + # system catalog + register_core_codec(PG_MCV_LISTOID, + pgproto.bytea_encode, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + + # These two are internal to BRIN index support and are unlikely + # to be sent, but since I/O functions for these exist, add decoders + # nonetheless. 
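# If one of these values ever does arrive (say, when selecting from
# pg_statistic_ext_data directly), it decodes to raw bytes instead of
# raising.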
+ register_core_codec(PG_BRIN_BLOOM_SUMMARYOID, + NULL, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + + register_core_codec(PG_BRIN_MINMAX_MULTI_SUMMARYOID, + NULL, + pgproto.bytea_decode, + PG_FORMAT_BINARY) + + +cdef init_text_codecs(): + textoids = [ + NAMEOID, + BPCHAROID, + VARCHAROID, + TEXTOID, + XMLOID + ] + + for oid in textoids: + register_core_codec(oid, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_BINARY) + + register_core_codec(oid, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + +cdef init_tid_codecs(): + register_core_codec(TIDOID, + pgproto.tid_encode, + pgproto.tid_decode, + PG_FORMAT_BINARY) + + +cdef init_txid_codecs(): + register_core_codec(TXID_SNAPSHOTOID, + pgproto.pg_snapshot_encode, + pgproto.pg_snapshot_decode, + PG_FORMAT_BINARY) + + register_core_codec(PG_SNAPSHOTOID, + pgproto.pg_snapshot_encode, + pgproto.pg_snapshot_decode, + PG_FORMAT_BINARY) + + +cdef init_tsearch_codecs(): + ts_oids = [ + TSQUERYOID, + TSVECTOROID, + ] + + for oid in ts_oids: + register_core_codec(oid, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + register_core_codec(GTSVECTOROID, + NULL, + pgproto.text_decode, + PG_FORMAT_TEXT) + + +cdef init_uuid_codecs(): + register_core_codec(UUIDOID, + pgproto.uuid_encode, + pgproto.uuid_decode, + PG_FORMAT_BINARY) + + +cdef init_numeric_codecs(): + register_core_codec(NUMERICOID, + pgproto.numeric_encode_text, + pgproto.numeric_decode_text, + PG_FORMAT_TEXT) + + register_core_codec(NUMERICOID, + pgproto.numeric_encode_binary, + pgproto.numeric_decode_binary, + PG_FORMAT_BINARY) + + +cdef init_network_codecs(): + register_core_codec(CIDROID, + pgproto.cidr_encode, + pgproto.cidr_decode, + PG_FORMAT_BINARY) + + register_core_codec(INETOID, + pgproto.inet_encode, + pgproto.inet_decode, + PG_FORMAT_BINARY) + + register_core_codec(MACADDROID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + register_core_codec(MACADDR8OID, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + +cdef init_monetary_codecs(): + moneyoids = [ + MONEYOID, + ] + + for oid in moneyoids: + register_core_codec(oid, + pgproto.text_encode, + pgproto.text_decode, + PG_FORMAT_TEXT) + + +cdef init_all_pgproto_codecs(): + # Builtin types, in lexicographical order. 
+ init_bits_codecs() + init_bytea_codecs() + init_datetime_codecs() + init_float_codecs() + init_geometry_codecs() + init_int_codecs() + init_json_codecs() + init_monetary_codecs() + init_network_codecs() + init_numeric_codecs() + init_text_codecs() + init_tid_codecs() + init_tsearch_codecs() + init_txid_codecs() + init_uuid_codecs() + + # Various pseudotypes and system types + init_pseudo_codecs() + + # contrib + init_hstore_codecs() + + +init_all_pgproto_codecs() diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/range.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/range.pyx new file mode 100644 index 0000000..1038c18 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/range.pyx @@ -0,0 +1,207 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from asyncpg import types as apg_types + +from collections.abc import Sequence as SequenceABC + +# defined in postgresql/src/include/utils/rangetypes.h +DEF RANGE_EMPTY = 0x01 # range is empty +DEF RANGE_LB_INC = 0x02 # lower bound is inclusive +DEF RANGE_UB_INC = 0x04 # upper bound is inclusive +DEF RANGE_LB_INF = 0x08 # lower bound is -infinity +DEF RANGE_UB_INF = 0x10 # upper bound is +infinity + + +cdef enum _RangeArgumentType: + _RANGE_ARGUMENT_INVALID = 0 + _RANGE_ARGUMENT_TUPLE = 1 + _RANGE_ARGUMENT_RANGE = 2 + + +cdef inline bint _range_has_lbound(uint8_t flags): + return not (flags & (RANGE_EMPTY | RANGE_LB_INF)) + + +cdef inline bint _range_has_ubound(uint8_t flags): + return not (flags & (RANGE_EMPTY | RANGE_UB_INF)) + + +cdef inline _RangeArgumentType _range_type(object obj): + if cpython.PyTuple_Check(obj) or cpython.PyList_Check(obj): + return _RANGE_ARGUMENT_TUPLE + elif isinstance(obj, apg_types.Range): + return _RANGE_ARGUMENT_RANGE + else: + return _RANGE_ARGUMENT_INVALID + + +cdef range_encode(ConnectionSettings settings, WriteBuffer buf, + object obj, uint32_t elem_oid, + encode_func_ex encoder, const void *encoder_arg): + cdef: + ssize_t obj_len + uint8_t flags = 0 + object lower = None + object upper = None + WriteBuffer bounds_data = WriteBuffer.new() + _RangeArgumentType arg_type = _range_type(obj) + + if arg_type == _RANGE_ARGUMENT_INVALID: + raise TypeError( + 'list, tuple or Range object expected (got type {})'.format( + type(obj))) + + elif arg_type == _RANGE_ARGUMENT_TUPLE: + obj_len = len(obj) + if obj_len == 2: + lower = obj[0] + upper = obj[1] + + if lower is None: + flags |= RANGE_LB_INF + + if upper is None: + flags |= RANGE_UB_INF + + flags |= RANGE_LB_INC | RANGE_UB_INC + + elif obj_len == 1: + lower = obj[0] + flags |= RANGE_LB_INC | RANGE_UB_INF + + elif obj_len == 0: + flags |= RANGE_EMPTY + + else: + raise ValueError( + 'expected 0, 1 or 2 elements in range (got {})'.format( + obj_len)) + + else: + if obj.isempty: + flags |= RANGE_EMPTY + else: + lower = obj.lower + upper = obj.upper + + if obj.lower_inc: + flags |= RANGE_LB_INC + elif lower is None: + flags |= RANGE_LB_INF + + if obj.upper_inc: + flags |= RANGE_UB_INC + elif upper is None: + flags |= RANGE_UB_INF + + if _range_has_lbound(flags): + encoder(settings, bounds_data, lower, encoder_arg) + + if _range_has_ubound(flags): + encoder(settings, bounds_data, upper, encoder_arg) + + buf.write_int32(1 + bounds_data.len()) + buf.write_byte(flags) + buf.write_buffer(bounds_data) + + +cdef range_decode(ConnectionSettings settings, FRBuffer *buf, + decode_func_ex 
decoder, const void *decoder_arg): + cdef: + uint8_t flags = frb_read(buf, 1)[0] + int32_t bound_len + object lower = None + object upper = None + FRBuffer bound_buf + + if _range_has_lbound(flags): + bound_len = hton.unpack_int32(frb_read(buf, 4)) + if bound_len == -1: + lower = None + else: + frb_slice_from(&bound_buf, buf, bound_len) + lower = decoder(settings, &bound_buf, decoder_arg) + + if _range_has_ubound(flags): + bound_len = hton.unpack_int32(frb_read(buf, 4)) + if bound_len == -1: + upper = None + else: + frb_slice_from(&bound_buf, buf, bound_len) + upper = decoder(settings, &bound_buf, decoder_arg) + + return apg_types.Range(lower=lower, upper=upper, + lower_inc=(flags & RANGE_LB_INC) != 0, + upper_inc=(flags & RANGE_UB_INC) != 0, + empty=(flags & RANGE_EMPTY) != 0) + + +cdef multirange_encode(ConnectionSettings settings, WriteBuffer buf, + object obj, uint32_t elem_oid, + encode_func_ex encoder, const void *encoder_arg): + cdef: + WriteBuffer elem_data + ssize_t elem_data_len + ssize_t elem_count + + if not isinstance(obj, SequenceABC): + raise TypeError( + 'expected a sequence (got type {!r})'.format(type(obj).__name__) + ) + + elem_data = WriteBuffer.new() + + for elem in obj: + range_encode(settings, elem_data, elem, elem_oid, encoder, encoder_arg) + + elem_count = len(obj) + if elem_count > INT32_MAX: + raise OverflowError(f'too many elements in multirange value') + + elem_data_len = elem_data.len() + if elem_data_len > INT32_MAX - 4: + raise OverflowError( + f'size of encoded multirange datum exceeds the maximum allowed' + f' {INT32_MAX - 4} bytes') + + # Datum length + buf.write_int32(4 + elem_data_len) + # Number of elements in multirange + buf.write_int32(elem_count) + buf.write_buffer(elem_data) + + +cdef multirange_decode(ConnectionSettings settings, FRBuffer *buf, + decode_func_ex decoder, const void *decoder_arg): + cdef: + int32_t nelems = hton.unpack_int32(frb_read(buf, 4)) + FRBuffer elem_buf + int32_t elem_len + int i + list result + + if nelems == 0: + return [] + + if nelems < 0: + raise exceptions.ProtocolError( + 'unexpected multirange size value: {}'.format(nelems)) + + result = cpython.PyList_New(nelems) + for i in range(nelems): + elem_len = hton.unpack_int32(frb_read(buf, 4)) + if elem_len == -1: + raise exceptions.ProtocolError( + 'unexpected NULL element in multirange value') + else: + frb_slice_from(&elem_buf, buf, elem_len) + elem = range_decode(settings, &elem_buf, decoder, decoder_arg) + cpython.Py_INCREF(elem) + cpython.PyList_SET_ITEM(result, i, elem) + + return result diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/record.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/record.pyx new file mode 100644 index 0000000..6446f2d --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/record.pyx @@ -0,0 +1,71 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from asyncpg import exceptions + + +cdef inline record_encode_frame(ConnectionSettings settings, WriteBuffer buf, + WriteBuffer elem_data, int32_t elem_count): + buf.write_int32(4 + elem_data.len()) + # attribute count + buf.write_int32(elem_count) + # encoded attribute data + buf.write_buffer(elem_data) + + +cdef anonymous_record_decode(ConnectionSettings settings, FRBuffer *buf): + cdef: + tuple result + ssize_t elem_count + ssize_t i + int32_t elem_len + uint32_t elem_typ + Codec elem_codec + 
FRBuffer elem_buf + + elem_count = hton.unpack_int32(frb_read(buf, 4)) + result = cpython.PyTuple_New(elem_count) + + for i in range(elem_count): + elem_typ = hton.unpack_int32(frb_read(buf, 4)) + elem_len = hton.unpack_int32(frb_read(buf, 4)) + + if elem_len == -1: + elem = None + else: + elem_codec = settings.get_data_codec(elem_typ) + if elem_codec is None or not elem_codec.has_decoder(): + raise exceptions.InternalClientError( + 'no decoder for composite type element in ' + 'position {} of type OID {}'.format(i, elem_typ)) + elem = elem_codec.decode(settings, + frb_slice_from(&elem_buf, buf, elem_len)) + + cpython.Py_INCREF(elem) + cpython.PyTuple_SET_ITEM(result, i, elem) + + return result + + +cdef anonymous_record_encode(ConnectionSettings settings, WriteBuffer buf, obj): + raise exceptions.UnsupportedClientFeatureError( + 'input of anonymous composite types is not supported', + hint=( + 'Consider declaring an explicit composite type and ' + 'using it to cast the argument.' + ), + detail='PostgreSQL does not implement anonymous composite type input.' + ) + + +cdef init_record_codecs(): + register_core_codec(RECORDOID, + anonymous_record_encode, + anonymous_record_decode, + PG_FORMAT_BINARY) + +init_record_codecs() diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/textutils.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/textutils.pyx new file mode 100644 index 0000000..dfaf29e --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/codecs/textutils.pyx @@ -0,0 +1,99 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef inline uint32_t _apg_tolower(uint32_t c): + if c >= 'A' and c <= 'Z': + return c + 'a' - 'A' + else: + return c + + +cdef int apg_strcasecmp(const Py_UCS4 *s1, const Py_UCS4 *s2): + cdef: + uint32_t c1 + uint32_t c2 + int i = 0 + + while True: + c1 = s1[i] + c2 = s2[i] + + if c1 != c2: + c1 = _apg_tolower(c1) + c2 = _apg_tolower(c2) + if c1 != c2: + return c1 - c2 + + if c1 == 0 or c2 == 0: + break + + i += 1 + + return 0 + + +cdef int apg_strcasecmp_char(const char *s1, const char *s2): + cdef: + uint8_t c1 + uint8_t c2 + int i = 0 + + while True: + c1 = s1[i] + c2 = s2[i] + + if c1 != c2: + c1 = _apg_tolower(c1) + c2 = _apg_tolower(c2) + if c1 != c2: + return c1 - c2 + + if c1 == 0 or c2 == 0: + break + + i += 1 + + return 0 + + +cdef inline bint apg_ascii_isspace(Py_UCS4 ch): + return ( + ch == ' ' or + ch == '\n' or + ch == '\r' or + ch == '\t' or + ch == '\v' or + ch == '\f' + ) + + +cdef Py_UCS4 *apg_parse_int32(Py_UCS4 *buf, int32_t *num): + cdef: + Py_UCS4 *p + int32_t n = 0 + int32_t neg = 0 + + if buf[0] == '-': + neg = 1 + buf += 1 + elif buf[0] == '+': + buf += 1 + + p = buf + while p[0] >= '0' and p[0] <= '9': + n = 10 * n - (p[0] - '0') + p += 1 + + if p == buf: + return NULL + + if not neg: + n = -n + + num[0] = n + + return p diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/consts.pxi b/sbsheriff/Lib/site-packages/asyncpg/protocol/consts.pxi new file mode 100644 index 0000000..e1f8726 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/consts.pxi @@ -0,0 +1,12 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +DEF _MAXINT32 = 2**31 - 1 +DEF _COPY_BUFFER_SIZE = 524288 +DEF _COPY_SIGNATURE = 
b"PGCOPY\n\377\r\n\0" +DEF _EXECUTE_MANY_BUF_NUM = 4 +DEF _EXECUTE_MANY_BUF_SIZE = 32768 diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/coreproto.pxd b/sbsheriff/Lib/site-packages/asyncpg/protocol/coreproto.pxd new file mode 100644 index 0000000..f21559b --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/coreproto.pxd @@ -0,0 +1,194 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +include "scram.pxd" + + +cdef enum ConnectionStatus: + CONNECTION_OK = 1 + CONNECTION_BAD = 2 + CONNECTION_STARTED = 3 # Waiting for connection to be made. + + +cdef enum ProtocolState: + PROTOCOL_IDLE = 0 + + PROTOCOL_FAILED = 1 + PROTOCOL_ERROR_CONSUME = 2 + PROTOCOL_CANCELLED = 3 + PROTOCOL_TERMINATING = 4 + + PROTOCOL_AUTH = 10 + PROTOCOL_PREPARE = 11 + PROTOCOL_BIND_EXECUTE = 12 + PROTOCOL_BIND_EXECUTE_MANY = 13 + PROTOCOL_CLOSE_STMT_PORTAL = 14 + PROTOCOL_SIMPLE_QUERY = 15 + PROTOCOL_EXECUTE = 16 + PROTOCOL_BIND = 17 + PROTOCOL_COPY_OUT = 18 + PROTOCOL_COPY_OUT_DATA = 19 + PROTOCOL_COPY_OUT_DONE = 20 + PROTOCOL_COPY_IN = 21 + PROTOCOL_COPY_IN_DATA = 22 + + +cdef enum AuthenticationMessage: + AUTH_SUCCESSFUL = 0 + AUTH_REQUIRED_KERBEROS = 2 + AUTH_REQUIRED_PASSWORD = 3 + AUTH_REQUIRED_PASSWORDMD5 = 5 + AUTH_REQUIRED_SCMCRED = 6 + AUTH_REQUIRED_GSS = 7 + AUTH_REQUIRED_GSS_CONTINUE = 8 + AUTH_REQUIRED_SSPI = 9 + AUTH_REQUIRED_SASL = 10 + AUTH_SASL_CONTINUE = 11 + AUTH_SASL_FINAL = 12 + + +AUTH_METHOD_NAME = { + AUTH_REQUIRED_KERBEROS: 'kerberosv5', + AUTH_REQUIRED_PASSWORD: 'password', + AUTH_REQUIRED_PASSWORDMD5: 'md5', + AUTH_REQUIRED_GSS: 'gss', + AUTH_REQUIRED_SASL: 'scram-sha-256', + AUTH_REQUIRED_SSPI: 'sspi', +} + + +cdef enum ResultType: + RESULT_OK = 1 + RESULT_FAILED = 2 + + +cdef enum TransactionStatus: + PQTRANS_IDLE = 0 # connection idle + PQTRANS_ACTIVE = 1 # command in progress + PQTRANS_INTRANS = 2 # idle, within transaction block + PQTRANS_INERROR = 3 # idle, within failed transaction + PQTRANS_UNKNOWN = 4 # cannot determine status + + +ctypedef object (*decode_row_method)(object, const char*, ssize_t) + + +cdef class CoreProtocol: + cdef: + ReadBuffer buffer + bint _skip_discard + bint _discard_data + + # executemany support data + object _execute_iter + str _execute_portal_name + str _execute_stmt_name + + ConnectionStatus con_status + ProtocolState state + TransactionStatus xact_status + + str encoding + + object transport + + # Instance of _ConnectionParameters + object con_params + # Instance of SCRAMAuthentication + SCRAMAuthentication scram + + readonly int32_t backend_pid + readonly int32_t backend_secret + + ## Result + ResultType result_type + object result + bytes result_param_desc + bytes result_row_desc + bytes result_status_msg + + # True - completed, False - suspended + bint result_execute_completed + + cpdef is_in_transaction(self) + cdef _process__auth(self, char mtype) + cdef _process__prepare(self, char mtype) + cdef _process__bind_execute(self, char mtype) + cdef _process__bind_execute_many(self, char mtype) + cdef _process__close_stmt_portal(self, char mtype) + cdef _process__simple_query(self, char mtype) + cdef _process__bind(self, char mtype) + cdef _process__copy_out(self, char mtype) + cdef _process__copy_out_data(self, char mtype) + cdef _process__copy_in(self, char mtype) + cdef _process__copy_in_data(self, char mtype) + + cdef _parse_msg_authentication(self) + cdef 
_parse_msg_parameter_status(self) + cdef _parse_msg_notification(self) + cdef _parse_msg_backend_key_data(self) + cdef _parse_msg_ready_for_query(self) + cdef _parse_data_msgs(self) + cdef _parse_copy_data_msgs(self) + cdef _parse_msg_error_response(self, is_error) + cdef _parse_msg_command_complete(self) + + cdef _write_copy_data_msg(self, object data) + cdef _write_copy_done_msg(self) + cdef _write_copy_fail_msg(self, str cause) + + cdef _auth_password_message_cleartext(self) + cdef _auth_password_message_md5(self, bytes salt) + cdef _auth_password_message_sasl_initial(self, list sasl_auth_methods) + cdef _auth_password_message_sasl_continue(self, bytes server_response) + + cdef _write(self, buf) + cdef _writelines(self, list buffers) + + cdef _read_server_messages(self) + + cdef _push_result(self) + cdef _reset_result(self) + cdef _set_state(self, ProtocolState new_state) + + cdef _ensure_connected(self) + + cdef WriteBuffer _build_parse_message(self, str stmt_name, str query) + cdef WriteBuffer _build_bind_message(self, str portal_name, + str stmt_name, + WriteBuffer bind_data) + cdef WriteBuffer _build_empty_bind_data(self) + cdef WriteBuffer _build_execute_message(self, str portal_name, + int32_t limit) + + + cdef _connect(self) + cdef _prepare(self, str stmt_name, str query) + cdef _send_bind_message(self, str portal_name, str stmt_name, + WriteBuffer bind_data, int32_t limit) + cdef _bind_execute(self, str portal_name, str stmt_name, + WriteBuffer bind_data, int32_t limit) + cdef bint _bind_execute_many(self, str portal_name, str stmt_name, + object bind_data) + cdef bint _bind_execute_many_more(self, bint first=*) + cdef _bind_execute_many_fail(self, object error, bint first=*) + cdef _bind(self, str portal_name, str stmt_name, + WriteBuffer bind_data) + cdef _execute(self, str portal_name, int32_t limit) + cdef _close(self, str name, bint is_portal) + cdef _simple_query(self, str query) + cdef _copy_out(self, str copy_stmt) + cdef _copy_in(self, str copy_stmt) + cdef _terminate(self) + + cdef _decode_row(self, const char* buf, ssize_t buf_len) + + cdef _on_result(self) + cdef _on_notification(self, pid, channel, payload) + cdef _on_notice(self, parsed) + cdef _set_server_parameter(self, name, val) + cdef _on_connection_lost(self, exc) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/coreproto.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/coreproto.pyx new file mode 100644 index 0000000..6bf1adc --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/coreproto.pyx @@ -0,0 +1,1137 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import hashlib + + +include "scram.pyx" + + +cdef class CoreProtocol: + + def __init__(self, con_params): + # type of `con_params` is `_ConnectionParameters` + self.buffer = ReadBuffer() + self.user = con_params.user + self.password = con_params.password + self.auth_msg = None + self.con_params = con_params + self.con_status = CONNECTION_BAD + self.state = PROTOCOL_IDLE + self.xact_status = PQTRANS_IDLE + self.encoding = 'utf-8' + # type of `scram` is `SCRAMAuthentcation` + self.scram = None + + self._reset_result() + + cpdef is_in_transaction(self): + # PQTRANS_INTRANS = idle, within transaction block + # PQTRANS_INERROR = idle, within failed transaction + return self.xact_status in (PQTRANS_INTRANS, PQTRANS_INERROR) + + cdef _read_server_messages(self): + cdef: + char mtype + 
ProtocolState state + pgproto.take_message_method take_message = \ + self.buffer.take_message + pgproto.get_message_type_method get_message_type= \ + self.buffer.get_message_type + + while take_message(self.buffer) == 1: + mtype = get_message_type(self.buffer) + state = self.state + + try: + if mtype == b'S': + # ParameterStatus + self._parse_msg_parameter_status() + + elif mtype == b'A': + # NotificationResponse + self._parse_msg_notification() + + elif mtype == b'N': + # 'N' - NoticeResponse + self._on_notice(self._parse_msg_error_response(False)) + + elif state == PROTOCOL_AUTH: + self._process__auth(mtype) + + elif state == PROTOCOL_PREPARE: + self._process__prepare(mtype) + + elif state == PROTOCOL_BIND_EXECUTE: + self._process__bind_execute(mtype) + + elif state == PROTOCOL_BIND_EXECUTE_MANY: + self._process__bind_execute_many(mtype) + + elif state == PROTOCOL_EXECUTE: + self._process__bind_execute(mtype) + + elif state == PROTOCOL_BIND: + self._process__bind(mtype) + + elif state == PROTOCOL_CLOSE_STMT_PORTAL: + self._process__close_stmt_portal(mtype) + + elif state == PROTOCOL_SIMPLE_QUERY: + self._process__simple_query(mtype) + + elif state == PROTOCOL_COPY_OUT: + self._process__copy_out(mtype) + + elif (state == PROTOCOL_COPY_OUT_DATA or + state == PROTOCOL_COPY_OUT_DONE): + self._process__copy_out_data(mtype) + + elif state == PROTOCOL_COPY_IN: + self._process__copy_in(mtype) + + elif state == PROTOCOL_COPY_IN_DATA: + self._process__copy_in_data(mtype) + + elif state == PROTOCOL_CANCELLED: + # discard all messages until the sync message + if mtype == b'E': + self._parse_msg_error_response(True) + elif mtype == b'Z': + self._parse_msg_ready_for_query() + self._push_result() + else: + self.buffer.discard_message() + + elif state == PROTOCOL_ERROR_CONSUME: + # Error in protocol (on asyncpg side); + # discard all messages until sync message + + if mtype == b'Z': + # Sync point, self to push the result + if self.result_type != RESULT_FAILED: + self.result_type = RESULT_FAILED + self.result = apg_exc.InternalClientError( + 'unknown error in protocol implementation') + + self._parse_msg_ready_for_query() + self._push_result() + + else: + self.buffer.discard_message() + + elif state == PROTOCOL_TERMINATING: + # The connection is being terminated. + # discard all messages until connection + # termination. + self.buffer.discard_message() + + else: + raise apg_exc.InternalClientError( + f'cannot process message {chr(mtype)!r}: ' + f'protocol is in an unexpected state {state!r}.') + + except Exception as ex: + self.result_type = RESULT_FAILED + self.result = ex + + if mtype == b'Z': + self._push_result() + else: + self.state = PROTOCOL_ERROR_CONSUME + + finally: + self.buffer.finish_message() + + cdef _process__auth(self, char mtype): + if mtype == b'R': + # Authentication... + try: + self._parse_msg_authentication() + except Exception as ex: + # Exception in authentication parsing code + # is usually either malformed authentication data + # or missing support for cryptographic primitives + # in the hashlib module. + self.result_type = RESULT_FAILED + self.result = apg_exc.InternalClientError( + f"unexpected error while performing authentication: {ex}") + self.result.__cause__ = ex + self.con_status = CONNECTION_BAD + self._push_result() + else: + if self.result_type != RESULT_OK: + self.con_status = CONNECTION_BAD + self._push_result() + + elif self.auth_msg is not None: + # Server wants us to send auth data, so do that. 
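# For SCRAM (AUTH_REQUIRED_SASL) this branch is taken repeatedly,
# driving the usual three-step exchange:
#
#     R: AuthenticationSASL          -> send client-first-message
#     R: AuthenticationSASLContinue  -> send client-final-message
#     R: AuthenticationSASLFinal     -> verify the server signature locally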
+ self._write(self.auth_msg) + self.auth_msg = None + + elif mtype == b'K': + # BackendKeyData + self._parse_msg_backend_key_data() + + elif mtype == b'E': + # ErrorResponse + self.con_status = CONNECTION_BAD + self._parse_msg_error_response(True) + self._push_result() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self.con_status = CONNECTION_OK + self._push_result() + + cdef _process__prepare(self, char mtype): + if mtype == b't': + # Parameters description + self.result_param_desc = self.buffer.consume_message() + + elif mtype == b'1': + # ParseComplete + self.buffer.discard_message() + + elif mtype == b'T': + # Row description + self.result_row_desc = self.buffer.consume_message() + self._push_result() + + elif mtype == b'E': + # ErrorResponse + self._parse_msg_error_response(True) + # we don't send a sync during the parse/describe sequence + # but send a FLUSH instead. If an error happens we need to + # send a SYNC explicitly in order to mark the end of the transaction. + # this effectively clears the error and we then wait until we get a + # ready for new query message + self._write(SYNC_MESSAGE) + self.state = PROTOCOL_ERROR_CONSUME + + elif mtype == b'n': + # NoData + self.buffer.discard_message() + self._push_result() + + cdef _process__bind_execute(self, char mtype): + if mtype == b'D': + # DataRow + self._parse_data_msgs() + + elif mtype == b's': + # PortalSuspended + self.buffer.discard_message() + + elif mtype == b'C': + # CommandComplete + self.result_execute_completed = True + self._parse_msg_command_complete() + + elif mtype == b'E': + # ErrorResponse + self._parse_msg_error_response(True) + + elif mtype == b'2': + # BindComplete + self.buffer.discard_message() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + elif mtype == b'I': + # EmptyQueryResponse + self.buffer.discard_message() + + cdef _process__bind_execute_many(self, char mtype): + cdef WriteBuffer buf + + if mtype == b'D': + # DataRow + self._parse_data_msgs() + + elif mtype == b's': + # PortalSuspended + self.buffer.discard_message() + + elif mtype == b'C': + # CommandComplete + self._parse_msg_command_complete() + + elif mtype == b'E': + # ErrorResponse + self._parse_msg_error_response(True) + + elif mtype == b'2': + # BindComplete + self.buffer.discard_message() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + elif mtype == b'I': + # EmptyQueryResponse + self.buffer.discard_message() + + elif mtype == b'1': + # ParseComplete + self.buffer.discard_message() + + cdef _process__bind(self, char mtype): + if mtype == b'E': + # ErrorResponse + self._parse_msg_error_response(True) + + elif mtype == b'2': + # BindComplete + self.buffer.discard_message() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + cdef _process__close_stmt_portal(self, char mtype): + if mtype == b'E': + # ErrorResponse + self._parse_msg_error_response(True) + + elif mtype == b'3': + # CloseComplete + self.buffer.discard_message() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + cdef _process__simple_query(self, char mtype): + if mtype in {b'D', b'I', b'T'}: + # 'D' - DataRow + # 'I' - EmptyQueryResponse + # 'T' - RowDescription + self.buffer.discard_message() + + elif mtype == b'E': + # ErrorResponse + self._parse_msg_error_response(True) + + elif mtype == b'Z': + # ReadyForQuery + 
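
# A subtlety from _process__prepare above: Parse/Describe is followed by
# Flush rather than Sync, so an ErrorResponse forces an explicit Sync to
# close out the implicit transaction. Both are empty frames; a sketch of
# their wire form, matching the SYNC_MESSAGE/FLUSH_MESSAGE constants
# defined at the bottom of this module:

import struct

def empty_message(mtype: bytes) -> bytes:
    # type byte + int32 length (4 == just the length field, no payload)
    return mtype + struct.pack('!i', 4)

SYNC = empty_message(b'S')    # b'S\x00\x00\x00\x04'
FLUSH = empty_message(b'H')   # b'H\x00\x00\x00\x04'
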
self._parse_msg_ready_for_query() + self._push_result() + + elif mtype == b'C': + # CommandComplete + self._parse_msg_command_complete() + + else: + # We don't really care about COPY IN etc + self.buffer.discard_message() + + cdef _process__copy_out(self, char mtype): + if mtype == b'E': + self._parse_msg_error_response(True) + + elif mtype == b'H': + # CopyOutResponse + self._set_state(PROTOCOL_COPY_OUT_DATA) + self.buffer.discard_message() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + cdef _process__copy_out_data(self, char mtype): + if mtype == b'E': + self._parse_msg_error_response(True) + + elif mtype == b'd': + # CopyData + self._parse_copy_data_msgs() + + elif mtype == b'c': + # CopyDone + self.buffer.discard_message() + self._set_state(PROTOCOL_COPY_OUT_DONE) + + elif mtype == b'C': + # CommandComplete + self._parse_msg_command_complete() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + cdef _process__copy_in(self, char mtype): + if mtype == b'E': + self._parse_msg_error_response(True) + + elif mtype == b'G': + # CopyInResponse + self._set_state(PROTOCOL_COPY_IN_DATA) + self.buffer.discard_message() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + cdef _process__copy_in_data(self, char mtype): + if mtype == b'E': + self._parse_msg_error_response(True) + + elif mtype == b'C': + # CommandComplete + self._parse_msg_command_complete() + + elif mtype == b'Z': + # ReadyForQuery + self._parse_msg_ready_for_query() + self._push_result() + + cdef _parse_msg_command_complete(self): + cdef: + const char* cbuf + ssize_t cbuf_len + + cbuf = self.buffer.try_consume_message(&cbuf_len) + if cbuf != NULL and cbuf_len > 0: + msg = cpython.PyBytes_FromStringAndSize(cbuf, cbuf_len - 1) + else: + msg = self.buffer.read_null_str() + self.result_status_msg = msg + + cdef _parse_copy_data_msgs(self): + cdef: + ReadBuffer buf = self.buffer + + self.result = buf.consume_messages(b'd') + + # By this point we have consumed all CopyData messages + # in the inbound buffer. If there are no messages left + # in the buffer, we need to push the accumulated data + # out to the caller in anticipation of the new CopyData + # batch. If there _are_ non-CopyData messages left, + # we must not push the result here and let the + # _process__copy_out_data subprotocol do the job. + if not buf.take_message(): + self._on_result() + self.result = None + else: + # If there is a message in the buffer, put it back to + # be processed by the next protocol iteration. 
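
# The COPY subprotocols handled above exchange three client-side frames,
# mirrored by the _write_copy_* helpers just below: CopyData ('d') carrying
# raw bytes, CopyDone ('c'), and CopyFail ('f') carrying a NUL-terminated
# error string. A standalone sketch, not the asyncpg API:

import struct

def copy_data(chunk: bytes) -> bytes:
    return b'd' + struct.pack('!i', 4 + len(chunk)) + chunk

def copy_done() -> bytes:
    return b'c' + struct.pack('!i', 4)

def copy_fail(cause: str) -> bytes:
    payload = cause.encode('utf-8') + b'\x00'
    return b'f' + struct.pack('!i', 4 + len(payload)) + payload
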
+ buf.put_message() + + cdef _write_copy_data_msg(self, object data): + cdef: + WriteBuffer buf + object mview + Py_buffer *pybuf + + mview = cpythonx.PyMemoryView_GetContiguous( + data, cpython.PyBUF_READ, b'C') + + try: + pybuf = cpythonx.PyMemoryView_GET_BUFFER(mview) + + buf = WriteBuffer.new_message(b'd') + buf.write_cstr(pybuf.buf, pybuf.len) + buf.end_message() + finally: + mview.release() + + self._write(buf) + + cdef _write_copy_done_msg(self): + cdef: + WriteBuffer buf + + buf = WriteBuffer.new_message(b'c') + buf.end_message() + self._write(buf) + + cdef _write_copy_fail_msg(self, str cause): + cdef: + WriteBuffer buf + + buf = WriteBuffer.new_message(b'f') + buf.write_str(cause or '', self.encoding) + buf.end_message() + self._write(buf) + + cdef _parse_data_msgs(self): + cdef: + ReadBuffer buf = self.buffer + list rows + + decode_row_method decoder = self._decode_row + pgproto.try_consume_message_method try_consume_message = \ + buf.try_consume_message + pgproto.take_message_type_method take_message_type = \ + buf.take_message_type + + const char* cbuf + ssize_t cbuf_len + object row + bytes mem + + if PG_DEBUG: + if buf.get_message_type() != b'D': + raise apg_exc.InternalClientError( + '_parse_data_msgs: first message is not "D"') + + if self._discard_data: + while take_message_type(buf, b'D'): + buf.discard_message() + return + + if PG_DEBUG: + if type(self.result) is not list: + raise apg_exc.InternalClientError( + '_parse_data_msgs: result is not a list, but {!r}'. + format(self.result)) + + rows = self.result + while take_message_type(buf, b'D'): + cbuf = try_consume_message(buf, &cbuf_len) + if cbuf != NULL: + row = decoder(self, cbuf, cbuf_len) + else: + mem = buf.consume_message() + row = decoder( + self, + cpython.PyBytes_AS_STRING(mem), + cpython.PyBytes_GET_SIZE(mem)) + + cpython.PyList_Append(rows, row) + + cdef _parse_msg_backend_key_data(self): + self.backend_pid = self.buffer.read_int32() + self.backend_secret = self.buffer.read_int32() + + cdef _parse_msg_parameter_status(self): + name = self.buffer.read_null_str() + name = name.decode(self.encoding) + + val = self.buffer.read_null_str() + val = val.decode(self.encoding) + + self._set_server_parameter(name, val) + + cdef _parse_msg_notification(self): + pid = self.buffer.read_int32() + channel = self.buffer.read_null_str().decode(self.encoding) + payload = self.buffer.read_null_str().decode(self.encoding) + self._on_notification(pid, channel, payload) + + cdef _parse_msg_authentication(self): + cdef: + int32_t status + bytes md5_salt + list sasl_auth_methods + list unsupported_sasl_auth_methods + + status = self.buffer.read_int32() + + if status == AUTH_SUCCESSFUL: + # AuthenticationOk + self.result_type = RESULT_OK + + elif status == AUTH_REQUIRED_PASSWORD: + # AuthenticationCleartextPassword + self.result_type = RESULT_OK + self.auth_msg = self._auth_password_message_cleartext() + + elif status == AUTH_REQUIRED_PASSWORDMD5: + # AuthenticationMD5Password + # Note: MD5 salt is passed as a four-byte sequence + md5_salt = self.buffer.read_bytes(4) + self.auth_msg = self._auth_password_message_md5(md5_salt) + + elif status == AUTH_REQUIRED_SASL: + # AuthenticationSASL + # This requires making additional requests to the server in order + # to follow the SCRAM protocol defined in RFC 5802. 
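
# The password-message builders further below cover the three schemes the
# server can request. Two standalone sketches: the classic md5 response
# ('md5' + md5(md5(password + username) + salt), hex-encoded at each step),
# and a SCRAM-SHA-256 client-first message per RFC 5802. Plain Python for
# illustration; this is not asyncpg's SCRAMAuthentication class.

import base64
import hashlib
import os

def md5_auth_response(user: str, password: str, salt: bytes) -> bytes:
    inner = hashlib.md5((password + user).encode('ascii')).hexdigest()
    outer = hashlib.md5(inner.encode('ascii') + salt).hexdigest()
    return b'md5' + outer.encode('ascii')

def scram_client_first(user: str) -> bytes:
    # 'n,,' is the GS2 header: no channel binding, no authzid. Real
    # implementations also SASLprep the username and escape '=' and ','.
    nonce = base64.b64encode(os.urandom(24))
    return b'n,,n=' + user.encode('utf-8') + b',r=' + nonce
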
+ # get the SASL authentication methods that the server is providing + sasl_auth_methods = [] + unsupported_sasl_auth_methods = [] + # determine if the advertised authentication methods are supported, + # and if so, add them to the list + auth_method = self.buffer.read_null_str() + while auth_method: + if auth_method in SCRAMAuthentication.AUTHENTICATION_METHODS: + sasl_auth_methods.append(auth_method) + else: + unsupported_sasl_auth_methods.append(auth_method) + auth_method = self.buffer.read_null_str() + + # if none of the advertised authentication methods are supported, + # raise an error + # otherwise, initialize the SASL authentication exchange + if not sasl_auth_methods: + unsupported_sasl_auth_methods = [m.decode("ascii") + for m in unsupported_sasl_auth_methods] + self.result_type = RESULT_FAILED + self.result = apg_exc.InterfaceError( + 'unsupported SASL Authentication methods requested by the ' + 'server: {!r}'.format( + ", ".join(unsupported_sasl_auth_methods))) + else: + self.auth_msg = self._auth_password_message_sasl_initial( + sasl_auth_methods) + + elif status == AUTH_SASL_CONTINUE: + # AUTH_SASL_CONTINUE + # this requires sending the second part of the SASL exchange, where + # the client parses information sent back from the server, + # determines whether it is valid, and builds a challenge + # response to the server + server_response = self.buffer.consume_message() + self.auth_msg = self._auth_password_message_sasl_continue( + server_response) + + elif status == AUTH_SASL_FINAL: + # AUTH_SASL_FINAL + server_response = self.buffer.consume_message() + if not self.scram.verify_server_final_message(server_response): + self.result_type = RESULT_FAILED + self.result = apg_exc.InterfaceError( + 'could not verify server signature for ' + 'SCRAM authentication: scram-sha-256', + ) + + elif status in (AUTH_REQUIRED_KERBEROS, AUTH_REQUIRED_SCMCRED, + AUTH_REQUIRED_GSS, AUTH_REQUIRED_GSS_CONTINUE, + AUTH_REQUIRED_SSPI): + self.result_type = RESULT_FAILED + self.result = apg_exc.InterfaceError( + 'unsupported authentication method requested by the ' + 'server: {!r}'.format(AUTH_METHOD_NAME[status])) + + else: + self.result_type = RESULT_FAILED + self.result = apg_exc.InterfaceError( + 'unsupported authentication method requested by the ' + 'server: {}'.format(status)) + + if status not in [AUTH_SASL_CONTINUE, AUTH_SASL_FINAL]: + self.buffer.discard_message() + + cdef _auth_password_message_cleartext(self): + cdef: + WriteBuffer msg + + msg = WriteBuffer.new_message(b'p') + msg.write_bytestring(self.password.encode('ascii')) + msg.end_message() + + return msg + + cdef _auth_password_message_md5(self, bytes salt): + cdef: + WriteBuffer msg + + msg = WriteBuffer.new_message(b'p') + + # 'md5' + md5(md5(password + username) + salt) + userpass = ((self.password or '') + (self.user or '')).encode('ascii') + hash = hashlib.md5(hashlib.md5(userpass).hexdigest().\ + encode('ascii') + salt).hexdigest().encode('ascii') + + msg.write_bytestring(b'md5' + hash) + msg.end_message() + + return msg + + cdef _auth_password_message_sasl_initial(self, list sasl_auth_methods): + cdef: + WriteBuffer msg + + # use the first supported advertised mechanism + self.scram = SCRAMAuthentication(sasl_auth_methods[0]) + # this involves a call and response with the server + msg = WriteBuffer.new_message(b'p') + msg.write_bytes(self.scram.create_client_first_message(self.user or '')) + msg.end_message() + + return msg + + cdef _auth_password_message_sasl_continue(self, bytes server_response): + cdef: + WriteBuffer
msg + + # determine if there is a valid server response + self.scram.parse_server_first_message(server_response) + # this involves a call and response with the server + msg = WriteBuffer.new_message(b'p') + client_final_message = self.scram.create_client_final_message( + self.password or '') + msg.write_bytes(client_final_message) + msg.end_message() + + return msg + + cdef _parse_msg_ready_for_query(self): + cdef char status = self.buffer.read_byte() + + if status == b'I': + self.xact_status = PQTRANS_IDLE + elif status == b'T': + self.xact_status = PQTRANS_INTRANS + elif status == b'E': + self.xact_status = PQTRANS_INERROR + else: + self.xact_status = PQTRANS_UNKNOWN + + cdef _parse_msg_error_response(self, is_error): + cdef: + char code + bytes message + dict parsed = {} + + while True: + code = self.buffer.read_byte() + if code == 0: + break + + message = self.buffer.read_null_str() + + parsed[chr(code)] = message.decode() + + if is_error: + self.result_type = RESULT_FAILED + self.result = parsed + else: + return parsed + + cdef _push_result(self): + try: + self._on_result() + finally: + self._set_state(PROTOCOL_IDLE) + self._reset_result() + + cdef _reset_result(self): + self.result_type = RESULT_OK + self.result = None + self.result_param_desc = None + self.result_row_desc = None + self.result_status_msg = None + self.result_execute_completed = False + self._discard_data = False + + # executemany support data + self._execute_iter = None + self._execute_portal_name = None + self._execute_stmt_name = None + + cdef _set_state(self, ProtocolState new_state): + if new_state == PROTOCOL_IDLE: + if self.state == PROTOCOL_FAILED: + raise apg_exc.InternalClientError( + 'cannot switch to "idle" state; ' + 'protocol is in the "failed" state') + elif self.state == PROTOCOL_IDLE: + pass + else: + self.state = new_state + + elif new_state == PROTOCOL_FAILED: + self.state = PROTOCOL_FAILED + + elif new_state == PROTOCOL_CANCELLED: + self.state = PROTOCOL_CANCELLED + + elif new_state == PROTOCOL_TERMINATING: + self.state = PROTOCOL_TERMINATING + + else: + if self.state == PROTOCOL_IDLE: + self.state = new_state + + elif (self.state == PROTOCOL_COPY_OUT and + new_state == PROTOCOL_COPY_OUT_DATA): + self.state = new_state + + elif (self.state == PROTOCOL_COPY_OUT_DATA and + new_state == PROTOCOL_COPY_OUT_DONE): + self.state = new_state + + elif (self.state == PROTOCOL_COPY_IN and + new_state == PROTOCOL_COPY_IN_DATA): + self.state = new_state + + elif self.state == PROTOCOL_FAILED: + raise apg_exc.InternalClientError( + 'cannot switch to state {}; ' + 'protocol is in the "failed" state'.format(new_state)) + else: + raise apg_exc.InternalClientError( + 'cannot switch to state {}; ' + 'another operation ({}) is in progress'.format( + new_state, self.state)) + + cdef _ensure_connected(self): + if self.con_status != CONNECTION_OK: + raise apg_exc.InternalClientError('not connected') + + cdef WriteBuffer _build_parse_message(self, str stmt_name, str query): + cdef WriteBuffer buf + + buf = WriteBuffer.new_message(b'P') + buf.write_str(stmt_name, self.encoding) + buf.write_str(query, self.encoding) + buf.write_int16(0) + + buf.end_message() + return buf + + cdef WriteBuffer _build_bind_message(self, str portal_name, + str stmt_name, + WriteBuffer bind_data): + cdef WriteBuffer buf + + buf = WriteBuffer.new_message(b'B') + buf.write_str(portal_name, self.encoding) + buf.write_str(stmt_name, self.encoding) + + # Arguments + buf.write_buffer(bind_data) + + buf.end_message() + return buf + + cdef WriteBuffer 
_build_empty_bind_data(self): + cdef WriteBuffer buf + buf = WriteBuffer.new() + buf.write_int16(0) # The number of parameter format codes + buf.write_int16(0) # The number of parameter values + buf.write_int16(0) # The number of result-column format codes + return buf + + cdef WriteBuffer _build_execute_message(self, str portal_name, + int32_t limit): + cdef WriteBuffer buf + + buf = WriteBuffer.new_message(b'E') + buf.write_str(portal_name, self.encoding) # name of the portal + buf.write_int32(limit) # number of rows to return; 0 - all + + buf.end_message() + return buf + + # API for subclasses + + cdef _connect(self): + cdef: + WriteBuffer buf + WriteBuffer outbuf + + if self.con_status != CONNECTION_BAD: + raise apg_exc.InternalClientError('already connected') + + self._set_state(PROTOCOL_AUTH) + self.con_status = CONNECTION_STARTED + + # Assemble a startup message + buf = WriteBuffer() + + # protocol version + buf.write_int16(3) + buf.write_int16(0) + + buf.write_bytestring(b'client_encoding') + buf.write_bytestring("'{}'".format(self.encoding).encode('ascii')) + + buf.write_str('user', self.encoding) + buf.write_str(self.con_params.user, self.encoding) + + buf.write_str('database', self.encoding) + buf.write_str(self.con_params.database, self.encoding) + + if self.con_params.server_settings is not None: + for k, v in self.con_params.server_settings.items(): + buf.write_str(k, self.encoding) + buf.write_str(v, self.encoding) + + buf.write_bytestring(b'') + + # Send the buffer + outbuf = WriteBuffer() + outbuf.write_int32(buf.len() + 4) + outbuf.write_buffer(buf) + self._write(outbuf) + + cdef _prepare(self, str stmt_name, str query): + cdef: + WriteBuffer packet + WriteBuffer buf + + self._ensure_connected() + self._set_state(PROTOCOL_PREPARE) + + packet = self._build_parse_message(stmt_name, query) + + buf = WriteBuffer.new_message(b'D') + buf.write_byte(b'S') + buf.write_str(stmt_name, self.encoding) + buf.end_message() + packet.write_buffer(buf) + + packet.write_bytes(FLUSH_MESSAGE) + + self._write(packet) + + cdef _send_bind_message(self, str portal_name, str stmt_name, + WriteBuffer bind_data, int32_t limit): + + cdef: + WriteBuffer packet + WriteBuffer buf + + buf = self._build_bind_message(portal_name, stmt_name, bind_data) + packet = buf + + buf = self._build_execute_message(portal_name, limit) + packet.write_buffer(buf) + + packet.write_bytes(SYNC_MESSAGE) + + self._write(packet) + + cdef _bind_execute(self, str portal_name, str stmt_name, + WriteBuffer bind_data, int32_t limit): + + cdef WriteBuffer buf + + self._ensure_connected() + self._set_state(PROTOCOL_BIND_EXECUTE) + + self.result = [] + + self._send_bind_message(portal_name, stmt_name, bind_data, limit) + + cdef bint _bind_execute_many(self, str portal_name, str stmt_name, + object bind_data): + self._ensure_connected() + self._set_state(PROTOCOL_BIND_EXECUTE_MANY) + + self.result = None + self._discard_data = True + self._execute_iter = bind_data + self._execute_portal_name = portal_name + self._execute_stmt_name = stmt_name + return self._bind_execute_many_more(True) + + cdef bint _bind_execute_many_more(self, bint first=False): + cdef: + WriteBuffer packet + WriteBuffer buf + list buffers = [] + + # as we keep sending, the server may return an error early + if self.result_type == RESULT_FAILED: + self._write(SYNC_MESSAGE) + return False + + # collect up to four 32KB buffers to send + # https://github.com/MagicStack/asyncpg/pull/289#issuecomment-391215051 + while len(buffers) < _EXECUTE_MANY_BUF_NUM: + packet = 
WriteBuffer.new() + + # fill one 32KB buffer + while packet.len() < _EXECUTE_MANY_BUF_SIZE: + try: + # grab one item from the input + buf = next(self._execute_iter) + + # reached the end of the input + except StopIteration: + if first: + # if we never send anything, simply set the result + self._push_result() + else: + # otherwise, append SYNC and send the buffers + packet.write_bytes(SYNC_MESSAGE) + buffers.append(memoryview(packet)) + self._writelines(buffers) + return False + + # error in input, give up the buffers and cleanup + except Exception as ex: + self._bind_execute_many_fail(ex, first) + return False + + # all good, write to the buffer + first = False + packet.write_buffer( + self._build_bind_message( + self._execute_portal_name, + self._execute_stmt_name, + buf, + ) + ) + packet.write_buffer( + self._build_execute_message(self._execute_portal_name, 0, + ) + ) + + # collected one buffer + buffers.append(memoryview(packet)) + + # write to the wire, and signal the caller for more to send + self._writelines(buffers) + return True + + cdef _bind_execute_many_fail(self, object error, bint first=False): + cdef WriteBuffer buf + + self.result_type = RESULT_FAILED + self.result = error + if first: + self._push_result() + elif self.is_in_transaction(): + # we're in an explicit transaction, just SYNC + self._write(SYNC_MESSAGE) + else: + # In an implicit transaction, if `ignore_till_sync` is set, + # `ROLLBACK` will be ignored and `Sync` will restore the state; + # or the transaction will be rolled back with a warning saying + # that there was no transaction, but rollback is done anyway, + # so we could safely ignore this warning. + # GOTCHA: cannot use simple query message here, because it is + # ignored if `ignore_till_sync` is set. + buf = self._build_parse_message('', 'ROLLBACK') + buf.write_buffer(self._build_bind_message( + '', '', self._build_empty_bind_data())) + buf.write_buffer(self._build_execute_message('', 0)) + buf.write_bytes(SYNC_MESSAGE) + self._write(buf) + + cdef _execute(self, str portal_name, int32_t limit): + cdef WriteBuffer buf + + self._ensure_connected() + self._set_state(PROTOCOL_EXECUTE) + + self.result = [] + + buf = self._build_execute_message(portal_name, limit) + + buf.write_bytes(SYNC_MESSAGE) + + self._write(buf) + + cdef _bind(self, str portal_name, str stmt_name, + WriteBuffer bind_data): + + cdef WriteBuffer buf + + self._ensure_connected() + self._set_state(PROTOCOL_BIND) + + buf = self._build_bind_message(portal_name, stmt_name, bind_data) + + buf.write_bytes(SYNC_MESSAGE) + + self._write(buf) + + cdef _close(self, str name, bint is_portal): + cdef WriteBuffer buf + + self._ensure_connected() + self._set_state(PROTOCOL_CLOSE_STMT_PORTAL) + + buf = WriteBuffer.new_message(b'C') + + if is_portal: + buf.write_byte(b'P') + else: + buf.write_byte(b'S') + + buf.write_str(name, self.encoding) + buf.end_message() + + buf.write_bytes(SYNC_MESSAGE) + + self._write(buf) + + cdef _simple_query(self, str query): + cdef WriteBuffer buf + self._ensure_connected() + self._set_state(PROTOCOL_SIMPLE_QUERY) + buf = WriteBuffer.new_message(b'Q') + buf.write_str(query, self.encoding) + buf.end_message() + self._write(buf) + + cdef _copy_out(self, str copy_stmt): + cdef WriteBuffer buf + + self._ensure_connected() + self._set_state(PROTOCOL_COPY_OUT) + + # Send the COPY .. TO STDOUT using the SimpleQuery protocol. 
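
# _simple_query above and the COPY initiators here all ride on the same
# Query ('Q') message: a type byte, an int32 length, then the SQL text as a
# NUL-terminated string. A standalone sketch:

import struct

def query_message(sql: str, encoding: str = 'utf-8') -> bytes:
    payload = sql.encode(encoding) + b'\x00'
    return b'Q' + struct.pack('!i', 4 + len(payload)) + payload

# e.g. query_message('COPY mytable TO STDOUT') kicks off the COPY OUT flow
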
+ buf = WriteBuffer.new_message(b'Q') + buf.write_str(copy_stmt, self.encoding) + buf.end_message() + self._write(buf) + + cdef _copy_in(self, str copy_stmt): + cdef WriteBuffer buf + + self._ensure_connected() + self._set_state(PROTOCOL_COPY_IN) + + buf = WriteBuffer.new_message(b'Q') + buf.write_str(copy_stmt, self.encoding) + buf.end_message() + self._write(buf) + + cdef _terminate(self): + cdef WriteBuffer buf + self._ensure_connected() + self._set_state(PROTOCOL_TERMINATING) + buf = WriteBuffer.new_message(b'X') + buf.end_message() + self._write(buf) + + cdef _write(self, buf): + raise NotImplementedError + + cdef _writelines(self, list buffers): + raise NotImplementedError + + cdef _decode_row(self, const char* buf, ssize_t buf_len): + pass + + cdef _set_server_parameter(self, name, val): + pass + + cdef _on_result(self): + pass + + cdef _on_notice(self, parsed): + pass + + cdef _on_notification(self, pid, channel, payload): + pass + + cdef _on_connection_lost(self, exc): + pass + + +cdef bytes SYNC_MESSAGE = bytes(WriteBuffer.new_message(b'S').end_message()) +cdef bytes FLUSH_MESSAGE = bytes(WriteBuffer.new_message(b'H').end_message()) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/cpythonx.pxd b/sbsheriff/Lib/site-packages/asyncpg/protocol/cpythonx.pxd new file mode 100644 index 0000000..1c72988 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/cpythonx.pxd @@ -0,0 +1,19 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef extern from "Python.h": + int PyByteArray_Check(object) + + int PyMemoryView_Check(object) + Py_buffer *PyMemoryView_GET_BUFFER(object) + object PyMemoryView_GetContiguous(object, int buffertype, char order) + + Py_UCS4* PyUnicode_AsUCS4Copy(object) except NULL + object PyUnicode_FromKindAndData( + int kind, const void *buffer, Py_ssize_t size) + + int PyUnicode_4BYTE_KIND diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/encodings.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/encodings.pyx new file mode 100644 index 0000000..dcd692b --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/encodings.pyx @@ -0,0 +1,63 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +'''Map PostgreSQL encoding names to Python encoding names + +https://www.postgresql.org/docs/current/static/multibyte.html#CHARSET-TABLE +''' + +cdef dict ENCODINGS_MAP = { + 'abc': 'cp1258', + 'alt': 'cp866', + 'euc_cn': 'euccn', + 'euc_jp': 'eucjp', + 'euc_kr': 'euckr', + 'koi8r': 'koi8_r', + 'koi8u': 'koi8_u', + 'shift_jis_2004': 'euc_jis_2004', + 'sjis': 'shift_jis', + 'sql_ascii': 'ascii', + 'vscii': 'cp1258', + 'tcvn': 'cp1258', + 'tcvn5712': 'cp1258', + 'unicode': 'utf_8', + 'win': 'cp1251', + 'win1250': 'cp1250', + 'win1251': 'cp1251', + 'win1252': 'cp1252', + 'win1253': 'cp1253', + 'win1254': 'cp1254', + 'win1255': 'cp1255', + 'win1256': 'cp1256', + 'win1257': 'cp1257', + 'win1258': 'cp1258', + 'win866': 'cp866', + 'win874': 'cp874', + 'win932': 'cp932', + 'win936': 'cp936', + 'win949': 'cp949', + 'win950': 'cp950', + 'windows1250': 'cp1250', + 'windows1251': 'cp1251', + 'windows1252': 'cp1252', + 'windows1253': 'cp1253', + 'windows1254': 'cp1254', + 'windows1255': 'cp1255', + 'windows1256': 'cp1256', + 'windows1257': 'cp1257', +
'windows1258': 'cp1258', + 'windows866': 'cp866', + 'windows874': 'cp874', + 'windows932': 'cp932', + 'windows936': 'cp936', + 'windows949': 'cp949', + 'windows950': 'cp950', +} + + +cdef get_python_encoding(pg_encoding): + return ENCODINGS_MAP.get(pg_encoding.lower(), pg_encoding.lower()) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/pgtypes.pxi b/sbsheriff/Lib/site-packages/asyncpg/protocol/pgtypes.pxi new file mode 100644 index 0000000..e9bb782 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/pgtypes.pxi @@ -0,0 +1,266 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +# GENERATED FROM pg_catalog.pg_type +# DO NOT MODIFY, use tools/generate_type_map.py to update + +DEF INVALIDOID = 0 +DEF MAXBUILTINOID = 9999 +DEF MAXSUPPORTEDOID = 5080 + +DEF BOOLOID = 16 +DEF BYTEAOID = 17 +DEF CHAROID = 18 +DEF NAMEOID = 19 +DEF INT8OID = 20 +DEF INT2OID = 21 +DEF INT4OID = 23 +DEF REGPROCOID = 24 +DEF TEXTOID = 25 +DEF OIDOID = 26 +DEF TIDOID = 27 +DEF XIDOID = 28 +DEF CIDOID = 29 +DEF PG_DDL_COMMANDOID = 32 +DEF JSONOID = 114 +DEF XMLOID = 142 +DEF PG_NODE_TREEOID = 194 +DEF SMGROID = 210 +DEF TABLE_AM_HANDLEROID = 269 +DEF INDEX_AM_HANDLEROID = 325 +DEF POINTOID = 600 +DEF LSEGOID = 601 +DEF PATHOID = 602 +DEF BOXOID = 603 +DEF POLYGONOID = 604 +DEF LINEOID = 628 +DEF CIDROID = 650 +DEF FLOAT4OID = 700 +DEF FLOAT8OID = 701 +DEF ABSTIMEOID = 702 +DEF RELTIMEOID = 703 +DEF TINTERVALOID = 704 +DEF UNKNOWNOID = 705 +DEF CIRCLEOID = 718 +DEF MACADDR8OID = 774 +DEF MONEYOID = 790 +DEF MACADDROID = 829 +DEF INETOID = 869 +DEF _TEXTOID = 1009 +DEF _OIDOID = 1028 +DEF ACLITEMOID = 1033 +DEF BPCHAROID = 1042 +DEF VARCHAROID = 1043 +DEF DATEOID = 1082 +DEF TIMEOID = 1083 +DEF TIMESTAMPOID = 1114 +DEF TIMESTAMPTZOID = 1184 +DEF INTERVALOID = 1186 +DEF TIMETZOID = 1266 +DEF BITOID = 1560 +DEF VARBITOID = 1562 +DEF NUMERICOID = 1700 +DEF REFCURSOROID = 1790 +DEF REGPROCEDUREOID = 2202 +DEF REGOPEROID = 2203 +DEF REGOPERATOROID = 2204 +DEF REGCLASSOID = 2205 +DEF REGTYPEOID = 2206 +DEF RECORDOID = 2249 +DEF CSTRINGOID = 2275 +DEF ANYOID = 2276 +DEF ANYARRAYOID = 2277 +DEF VOIDOID = 2278 +DEF TRIGGEROID = 2279 +DEF LANGUAGE_HANDLEROID = 2280 +DEF INTERNALOID = 2281 +DEF OPAQUEOID = 2282 +DEF ANYELEMENTOID = 2283 +DEF ANYNONARRAYOID = 2776 +DEF UUIDOID = 2950 +DEF TXID_SNAPSHOTOID = 2970 +DEF FDW_HANDLEROID = 3115 +DEF PG_LSNOID = 3220 +DEF TSM_HANDLEROID = 3310 +DEF PG_NDISTINCTOID = 3361 +DEF PG_DEPENDENCIESOID = 3402 +DEF ANYENUMOID = 3500 +DEF TSVECTOROID = 3614 +DEF TSQUERYOID = 3615 +DEF GTSVECTOROID = 3642 +DEF REGCONFIGOID = 3734 +DEF REGDICTIONARYOID = 3769 +DEF JSONBOID = 3802 +DEF ANYRANGEOID = 3831 +DEF EVENT_TRIGGEROID = 3838 +DEF JSONPATHOID = 4072 +DEF REGNAMESPACEOID = 4089 +DEF REGROLEOID = 4096 +DEF REGCOLLATIONOID = 4191 +DEF ANYMULTIRANGEOID = 4537 +DEF ANYCOMPATIBLEMULTIRANGEOID = 4538 +DEF PG_BRIN_BLOOM_SUMMARYOID = 4600 +DEF PG_BRIN_MINMAX_MULTI_SUMMARYOID = 4601 +DEF PG_MCV_LISTOID = 5017 +DEF PG_SNAPSHOTOID = 5038 +DEF XID8OID = 5069 +DEF ANYCOMPATIBLEOID = 5077 +DEF ANYCOMPATIBLEARRAYOID = 5078 +DEF ANYCOMPATIBLENONARRAYOID = 5079 +DEF ANYCOMPATIBLERANGEOID = 5080 + +cdef ARRAY_TYPES = (_TEXTOID, _OIDOID,) + +BUILTIN_TYPE_OID_MAP = { + ABSTIMEOID: 'abstime', + ACLITEMOID: 'aclitem', + ANYARRAYOID: 'anyarray', + ANYCOMPATIBLEARRAYOID: 'anycompatiblearray', + ANYCOMPATIBLEMULTIRANGEOID: 
'anycompatiblemultirange', + ANYCOMPATIBLENONARRAYOID: 'anycompatiblenonarray', + ANYCOMPATIBLEOID: 'anycompatible', + ANYCOMPATIBLERANGEOID: 'anycompatiblerange', + ANYELEMENTOID: 'anyelement', + ANYENUMOID: 'anyenum', + ANYMULTIRANGEOID: 'anymultirange', + ANYNONARRAYOID: 'anynonarray', + ANYOID: 'any', + ANYRANGEOID: 'anyrange', + BITOID: 'bit', + BOOLOID: 'bool', + BOXOID: 'box', + BPCHAROID: 'bpchar', + BYTEAOID: 'bytea', + CHAROID: 'char', + CIDOID: 'cid', + CIDROID: 'cidr', + CIRCLEOID: 'circle', + CSTRINGOID: 'cstring', + DATEOID: 'date', + EVENT_TRIGGEROID: 'event_trigger', + FDW_HANDLEROID: 'fdw_handler', + FLOAT4OID: 'float4', + FLOAT8OID: 'float8', + GTSVECTOROID: 'gtsvector', + INDEX_AM_HANDLEROID: 'index_am_handler', + INETOID: 'inet', + INT2OID: 'int2', + INT4OID: 'int4', + INT8OID: 'int8', + INTERNALOID: 'internal', + INTERVALOID: 'interval', + JSONBOID: 'jsonb', + JSONOID: 'json', + JSONPATHOID: 'jsonpath', + LANGUAGE_HANDLEROID: 'language_handler', + LINEOID: 'line', + LSEGOID: 'lseg', + MACADDR8OID: 'macaddr8', + MACADDROID: 'macaddr', + MONEYOID: 'money', + NAMEOID: 'name', + NUMERICOID: 'numeric', + OIDOID: 'oid', + OPAQUEOID: 'opaque', + PATHOID: 'path', + PG_BRIN_BLOOM_SUMMARYOID: 'pg_brin_bloom_summary', + PG_BRIN_MINMAX_MULTI_SUMMARYOID: 'pg_brin_minmax_multi_summary', + PG_DDL_COMMANDOID: 'pg_ddl_command', + PG_DEPENDENCIESOID: 'pg_dependencies', + PG_LSNOID: 'pg_lsn', + PG_MCV_LISTOID: 'pg_mcv_list', + PG_NDISTINCTOID: 'pg_ndistinct', + PG_NODE_TREEOID: 'pg_node_tree', + PG_SNAPSHOTOID: 'pg_snapshot', + POINTOID: 'point', + POLYGONOID: 'polygon', + RECORDOID: 'record', + REFCURSOROID: 'refcursor', + REGCLASSOID: 'regclass', + REGCOLLATIONOID: 'regcollation', + REGCONFIGOID: 'regconfig', + REGDICTIONARYOID: 'regdictionary', + REGNAMESPACEOID: 'regnamespace', + REGOPERATOROID: 'regoperator', + REGOPEROID: 'regoper', + REGPROCEDUREOID: 'regprocedure', + REGPROCOID: 'regproc', + REGROLEOID: 'regrole', + REGTYPEOID: 'regtype', + RELTIMEOID: 'reltime', + SMGROID: 'smgr', + TABLE_AM_HANDLEROID: 'table_am_handler', + TEXTOID: 'text', + TIDOID: 'tid', + TIMEOID: 'time', + TIMESTAMPOID: 'timestamp', + TIMESTAMPTZOID: 'timestamptz', + TIMETZOID: 'timetz', + TINTERVALOID: 'tinterval', + TRIGGEROID: 'trigger', + TSM_HANDLEROID: 'tsm_handler', + TSQUERYOID: 'tsquery', + TSVECTOROID: 'tsvector', + TXID_SNAPSHOTOID: 'txid_snapshot', + UNKNOWNOID: 'unknown', + UUIDOID: 'uuid', + VARBITOID: 'varbit', + VARCHAROID: 'varchar', + VOIDOID: 'void', + XID8OID: 'xid8', + XIDOID: 'xid', + XMLOID: 'xml', + _OIDOID: 'oid[]', + _TEXTOID: 'text[]' +} + +BUILTIN_TYPE_NAME_MAP = {v: k for k, v in BUILTIN_TYPE_OID_MAP.items()} + +BUILTIN_TYPE_NAME_MAP['smallint'] = \ + BUILTIN_TYPE_NAME_MAP['int2'] + +BUILTIN_TYPE_NAME_MAP['int'] = \ + BUILTIN_TYPE_NAME_MAP['int4'] + +BUILTIN_TYPE_NAME_MAP['integer'] = \ + BUILTIN_TYPE_NAME_MAP['int4'] + +BUILTIN_TYPE_NAME_MAP['bigint'] = \ + BUILTIN_TYPE_NAME_MAP['int8'] + +BUILTIN_TYPE_NAME_MAP['decimal'] = \ + BUILTIN_TYPE_NAME_MAP['numeric'] + +BUILTIN_TYPE_NAME_MAP['real'] = \ + BUILTIN_TYPE_NAME_MAP['float4'] + +BUILTIN_TYPE_NAME_MAP['double precision'] = \ + BUILTIN_TYPE_NAME_MAP['float8'] + +BUILTIN_TYPE_NAME_MAP['timestamp with timezone'] = \ + BUILTIN_TYPE_NAME_MAP['timestamptz'] + +BUILTIN_TYPE_NAME_MAP['timestamp without timezone'] = \ + BUILTIN_TYPE_NAME_MAP['timestamp'] + +BUILTIN_TYPE_NAME_MAP['time with timezone'] = \ + BUILTIN_TYPE_NAME_MAP['timetz'] + +BUILTIN_TYPE_NAME_MAP['time without timezone'] = \ + BUILTIN_TYPE_NAME_MAP['time'] + 
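
# How the two maps relate: BUILTIN_TYPE_OID_MAP goes OID -> canonical name,
# and BUILTIN_TYPE_NAME_MAP is its inverse plus the SQL spellings aliased
# above. A quick illustration, assuming both dicts are in scope:

assert BUILTIN_TYPE_OID_MAP[23] == 'int4'
assert BUILTIN_TYPE_NAME_MAP['integer'] == 23             # alias of int4
assert BUILTIN_TYPE_NAME_MAP['timestamptz'] == 1184
assert BUILTIN_TYPE_NAME_MAP['double precision'] == 701   # alias of float8
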
+BUILTIN_TYPE_NAME_MAP['char'] = \ + BUILTIN_TYPE_NAME_MAP['bpchar'] + +BUILTIN_TYPE_NAME_MAP['character'] = \ + BUILTIN_TYPE_NAME_MAP['bpchar'] + +BUILTIN_TYPE_NAME_MAP['character varying'] = \ + BUILTIN_TYPE_NAME_MAP['varchar'] + +BUILTIN_TYPE_NAME_MAP['bit varying'] = \ + BUILTIN_TYPE_NAME_MAP['varbit'] diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/prepared_stmt.pxd b/sbsheriff/Lib/site-packages/asyncpg/protocol/prepared_stmt.pxd new file mode 100644 index 0000000..3906af2 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/prepared_stmt.pxd @@ -0,0 +1,38 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef class PreparedStatementState: + cdef: + readonly str name + readonly str query + readonly bint closed + readonly int refs + readonly type record_class + readonly bint ignore_custom_codec + + + list row_desc + list parameters_desc + + ConnectionSettings settings + + int16_t args_num + bint have_text_args + tuple args_codecs + + int16_t cols_num + object cols_desc + bint have_text_cols + tuple rows_codecs + + cdef _encode_bind_msg(self, args, int seqno = ?) + cpdef _init_codecs(self) + cdef _ensure_rows_decoder(self) + cdef _ensure_args_encoder(self) + cdef _set_row_desc(self, object desc) + cdef _set_args_desc(self, object desc) + cdef _decode_row(self, const char* cbuf, ssize_t buf_len) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/prepared_stmt.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/prepared_stmt.pyx new file mode 100644 index 0000000..b1f2a66 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/prepared_stmt.pyx @@ -0,0 +1,388 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from asyncpg import exceptions + + +@cython.final +cdef class PreparedStatementState: + + def __cinit__( + self, + str name, + str query, + BaseProtocol protocol, + type record_class, + bint ignore_custom_codec + ): + self.name = name + self.query = query + self.settings = protocol.settings + self.row_desc = self.parameters_desc = None + self.args_codecs = self.rows_codecs = None + self.args_num = self.cols_num = 0 + self.cols_desc = None + self.closed = False + self.refs = 0 + self.record_class = record_class + self.ignore_custom_codec = ignore_custom_codec + + def _get_parameters(self): + cdef Codec codec + + result = [] + for oid in self.parameters_desc: + codec = self.settings.get_data_codec(oid) + if codec is None: + raise exceptions.InternalClientError( + 'missing codec information for OID {}'.format(oid)) + result.append(apg_types.Type( + oid, codec.name, codec.kind, codec.schema)) + + return tuple(result) + + def _get_attributes(self): + cdef Codec codec + + if not self.row_desc: + return () + + result = [] + for d in self.row_desc: + name = d[0] + oid = d[3] + + codec = self.settings.get_data_codec(oid) + if codec is None: + raise exceptions.InternalClientError( + 'missing codec information for OID {}'.format(oid)) + + name = name.decode(self.settings._encoding) + + result.append( + apg_types.Attribute(name, + apg_types.Type(oid, codec.name, codec.kind, codec.schema))) + + return tuple(result) + + def _init_types(self): + cdef: + Codec codec + set missing = set() + + if self.parameters_desc: + for p_oid in self.parameters_desc: + 
codec = self.settings.get_data_codec(p_oid) + if codec is None or not codec.has_encoder(): + missing.add(p_oid) + + if self.row_desc: + for rdesc in self.row_desc: + codec = self.settings.get_data_codec((rdesc[3])) + if codec is None or not codec.has_decoder(): + missing.add(rdesc[3]) + + return missing + + cpdef _init_codecs(self): + self._ensure_args_encoder() + self._ensure_rows_decoder() + + def attach(self): + self.refs += 1 + + def detach(self): + self.refs -= 1 + + def mark_closed(self): + self.closed = True + + cdef _encode_bind_msg(self, args, int seqno = -1): + cdef: + int idx + WriteBuffer writer + Codec codec + + if not cpython.PySequence_Check(args): + if seqno >= 0: + raise exceptions.DataError( + f'invalid input in executemany() argument sequence ' + f'element #{seqno}: expected a sequence, got ' + f'{type(args).__name__}' + ) + else: + # Non executemany() callers do not pass user input directly, + # so bad input is a bug. + raise exceptions.InternalClientError( + f'Bind: expected a sequence, got {type(args).__name__}') + + if len(args) > 32767: + raise exceptions.InterfaceError( + 'the number of query arguments cannot exceed 32767') + + writer = WriteBuffer.new() + + num_args_passed = len(args) + if self.args_num != num_args_passed: + hint = 'Check the query against the passed list of arguments.' + + if self.args_num == 0: + # If the server was expecting zero arguments, it is likely + # that the user tried to parametrize a statement that does + # not support parameters. + hint += (r' Note that parameters are supported only in' + r' SELECT, INSERT, UPDATE, DELETE, and VALUES' + r' statements, and will *not* work in statements ' + r' like CREATE VIEW or DECLARE CURSOR.') + + raise exceptions.InterfaceError( + 'the server expects {x} argument{s} for this query, ' + '{y} {w} passed'.format( + x=self.args_num, s='s' if self.args_num != 1 else '', + y=num_args_passed, + w='was' if num_args_passed == 1 else 'were'), + hint=hint) + + if self.have_text_args: + writer.write_int16(self.args_num) + for idx in range(self.args_num): + codec = (self.args_codecs[idx]) + writer.write_int16(codec.format) + else: + # All arguments are in binary format + writer.write_int32(0x00010001) + + writer.write_int16(self.args_num) + + for idx in range(self.args_num): + arg = args[idx] + if arg is None: + writer.write_int32(-1) + else: + codec = (self.args_codecs[idx]) + try: + codec.encode(self.settings, writer, arg) + except (AssertionError, exceptions.InternalClientError): + # These are internal errors and should raise as-is. + raise + except exceptions.InterfaceError as e: + # This is already a descriptive error, but annotate + # with argument name for clarity. + pos = f'${idx + 1}' + if seqno >= 0: + pos = ( + f'{pos} in element #{seqno} of' + f' executemany() sequence' + ) + raise e.with_msg( + f'query argument {pos}: {e.args[0]}' + ) from None + except Exception as e: + # Everything else is assumed to be an encoding error + # due to invalid input. + pos = f'${idx + 1}' + if seqno >= 0: + pos = ( + f'{pos} in element #{seqno} of' + f' executemany() sequence' + ) + value_repr = repr(arg) + if len(value_repr) > 40: + value_repr = value_repr[:40] + '...' 
+ + raise exceptions.DataError( + f'invalid input for query argument' + f' {pos}: {value_repr} ({e})' + ) from e + + if self.have_text_cols: + writer.write_int16(self.cols_num) + for idx in range(self.cols_num): + codec = (self.rows_codecs[idx]) + writer.write_int16(codec.format) + else: + # All columns are in binary format + writer.write_int32(0x00010001) + + return writer + + cdef _ensure_rows_decoder(self): + cdef: + list cols_names + object cols_mapping + tuple row + uint32_t oid + Codec codec + list codecs + + if self.cols_desc is not None: + return + + if self.cols_num == 0: + self.cols_desc = record.ApgRecordDesc_New({}, ()) + return + + cols_mapping = collections.OrderedDict() + cols_names = [] + codecs = [] + for i from 0 <= i < self.cols_num: + row = self.row_desc[i] + col_name = row[0].decode(self.settings._encoding) + cols_mapping[col_name] = i + cols_names.append(col_name) + oid = row[3] + codec = self.settings.get_data_codec( + oid, ignore_custom_codec=self.ignore_custom_codec) + if codec is None or not codec.has_decoder(): + raise exceptions.InternalClientError( + 'no decoder for OID {}'.format(oid)) + if not codec.is_binary(): + self.have_text_cols = True + + codecs.append(codec) + + self.cols_desc = record.ApgRecordDesc_New( + cols_mapping, tuple(cols_names)) + + self.rows_codecs = tuple(codecs) + + cdef _ensure_args_encoder(self): + cdef: + uint32_t p_oid + Codec codec + list codecs = [] + + if self.args_num == 0 or self.args_codecs is not None: + return + + for i from 0 <= i < self.args_num: + p_oid = self.parameters_desc[i] + codec = self.settings.get_data_codec( + p_oid, ignore_custom_codec=self.ignore_custom_codec) + if codec is None or not codec.has_encoder(): + raise exceptions.InternalClientError( + 'no encoder for OID {}'.format(p_oid)) + if codec.type not in {}: + self.have_text_args = True + + codecs.append(codec) + + self.args_codecs = tuple(codecs) + + cdef _set_row_desc(self, object desc): + self.row_desc = _decode_row_desc(desc) + self.cols_num = (len(self.row_desc)) + + cdef _set_args_desc(self, object desc): + self.parameters_desc = _decode_parameters_desc(desc) + self.args_num = (len(self.parameters_desc)) + + cdef _decode_row(self, const char* cbuf, ssize_t buf_len): + cdef: + Codec codec + int16_t fnum + int32_t flen + object dec_row + tuple rows_codecs = self.rows_codecs + ConnectionSettings settings = self.settings + int32_t i + FRBuffer rbuf + ssize_t bl + + frb_init(&rbuf, cbuf, buf_len) + + fnum = hton.unpack_int16(frb_read(&rbuf, 2)) + + if fnum != self.cols_num: + raise exceptions.ProtocolError( + 'the number of columns in the result row ({}) is ' + 'different from what was described ({})'.format( + fnum, self.cols_num)) + + dec_row = record.ApgRecord_New(self.record_class, self.cols_desc, fnum) + for i in range(fnum): + flen = hton.unpack_int32(frb_read(&rbuf, 4)) + + if flen == -1: + val = None + else: + # Clamp buffer size to that of the reported field length + # to make sure that codecs can rely on read_all() working + # properly. 
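
# For reference, the DataRow ('D') payload that this method walks: an int16
# field count, then one int32 length per field (-1 meaning NULL) followed
# by that many bytes. A standalone sketch that returns raw field values
# without running any codecs:

import struct

def decode_datarow_payload(payload: bytes):
    (nfields,) = struct.unpack_from('!h', payload, 0)
    offset, fields = 2, []
    for _ in range(nfields):
        (flen,) = struct.unpack_from('!i', payload, offset)
        offset += 4
        if flen == -1:
            fields.append(None)        # SQL NULL
        else:
            fields.append(payload[offset:offset + flen])
            offset += flen
    return fields
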
+ bl = frb_get_len(&rbuf) + if flen > bl: + frb_check(&rbuf, flen) + frb_set_len(&rbuf, flen) + codec = cpython.PyTuple_GET_ITEM(rows_codecs, i) + val = codec.decode(settings, &rbuf) + if frb_get_len(&rbuf) != 0: + raise BufferError( + 'unexpected trailing {} bytes in buffer'.format( + frb_get_len(&rbuf))) + frb_set_len(&rbuf, bl - flen) + + cpython.Py_INCREF(val) + record.ApgRecord_SET_ITEM(dec_row, i, val) + + if frb_get_len(&rbuf) != 0: + raise BufferError('unexpected trailing {} bytes in buffer'.format( + frb_get_len(&rbuf))) + + return dec_row + + +cdef _decode_parameters_desc(object desc): + cdef: + ReadBuffer reader + int16_t nparams + uint32_t p_oid + list result = [] + + reader = ReadBuffer.new_message_parser(desc) + nparams = reader.read_int16() + + for i from 0 <= i < nparams: + p_oid = reader.read_int32() + result.append(p_oid) + + return result + + +cdef _decode_row_desc(object desc): + cdef: + ReadBuffer reader + + int16_t nfields + + bytes f_name + uint32_t f_table_oid + int16_t f_column_num + uint32_t f_dt_oid + int16_t f_dt_size + int32_t f_dt_mod + int16_t f_format + + list result + + reader = ReadBuffer.new_message_parser(desc) + nfields = reader.read_int16() + result = [] + + for i from 0 <= i < nfields: + f_name = reader.read_null_str() + f_table_oid = reader.read_int32() + f_column_num = reader.read_int16() + f_dt_oid = reader.read_int32() + f_dt_size = reader.read_int16() + f_dt_mod = reader.read_int32() + f_format = reader.read_int16() + + result.append( + (f_name, f_table_oid, f_column_num, f_dt_oid, + f_dt_size, f_dt_mod, f_format)) + + return result diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.cp310-win_amd64.pyd b/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.cp310-win_amd64.pyd new file mode 100644 index 0000000..c1b695a Binary files /dev/null and b/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.cp310-win_amd64.pyd differ diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.pxd b/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.pxd new file mode 100644 index 0000000..5f144e5 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.pxd @@ -0,0 +1,80 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from libc.stdint cimport int16_t, int32_t, uint16_t, \ + uint32_t, int64_t, uint64_t + +from asyncpg.pgproto.debug cimport PG_DEBUG + +from asyncpg.pgproto.pgproto cimport ( + WriteBuffer, + ReadBuffer, + FRBuffer, +) + +from asyncpg.pgproto cimport pgproto + +include "consts.pxi" +include "pgtypes.pxi" + +include "codecs/base.pxd" +include "settings.pxd" +include "coreproto.pxd" +include "prepared_stmt.pxd" + + +cdef class BaseProtocol(CoreProtocol): + + cdef: + object loop + object address + ConnectionSettings settings + object cancel_sent_waiter + object cancel_waiter + object waiter + bint return_extra + object create_future + object timeout_handle + object timeout_callback + object completed_callback + object conref + type record_class + bint is_reading + + str last_query + + bint writing_paused + bint closing + + readonly uint64_t queries_count + + bint _is_ssl + + PreparedStatementState statement + + cdef get_connection(self) + + cdef _get_timeout_impl(self, timeout) + cdef _check_state(self) + cdef _new_waiter(self, timeout) + cdef _coreproto_error(self) + + cdef _on_result__connect(self, object waiter) + cdef 
_on_result__prepare(self, object waiter) + cdef _on_result__bind_and_exec(self, object waiter) + cdef _on_result__close_stmt_or_portal(self, object waiter) + cdef _on_result__simple_query(self, object waiter) + cdef _on_result__bind(self, object waiter) + cdef _on_result__copy_out(self, object waiter) + cdef _on_result__copy_in(self, object waiter) + + cdef _handle_waiter_on_connection_lost(self, cause) + + cdef _dispatch_result(self) + + cdef inline resume_reading(self) + cdef inline pause_reading(self) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.pyx new file mode 100644 index 0000000..3f512a8 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/protocol.pyx @@ -0,0 +1,1027 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +# cython: language_level=3 + +cimport cython +cimport cpython + +import asyncio +import builtins +import codecs +import collections.abc +import socket +import time +import weakref + +from asyncpg.pgproto.pgproto cimport ( + WriteBuffer, + ReadBuffer, + + FRBuffer, + frb_init, + frb_read, + frb_read_all, + frb_slice_from, + frb_check, + frb_set_len, + frb_get_len, +) + +from asyncpg.pgproto cimport pgproto +from asyncpg.protocol cimport cpythonx +from asyncpg.protocol cimport record + +from libc.stdint cimport int8_t, uint8_t, int16_t, uint16_t, \ + int32_t, uint32_t, int64_t, uint64_t, \ + INT32_MAX, UINT32_MAX + +from asyncpg.exceptions import _base as apg_exc_base +from asyncpg import compat +from asyncpg import types as apg_types +from asyncpg import exceptions as apg_exc + +from asyncpg.pgproto cimport hton + + +include "consts.pxi" +include "pgtypes.pxi" + +include "encodings.pyx" +include "settings.pyx" + +include "codecs/base.pyx" +include "codecs/textutils.pyx" + +# register codecs provided by pgproto +include "codecs/pgproto.pyx" + +# nonscalar +include "codecs/array.pyx" +include "codecs/range.pyx" +include "codecs/record.pyx" + +include "coreproto.pyx" +include "prepared_stmt.pyx" + + +NO_TIMEOUT = object() + + +cdef class BaseProtocol(CoreProtocol): + def __init__(self, addr, connected_fut, con_params, record_class: type, loop): + # type of `con_params` is `_ConnectionParameters` + CoreProtocol.__init__(self, con_params) + + self.loop = loop + self.transport = None + self.waiter = connected_fut + self.cancel_waiter = None + self.cancel_sent_waiter = None + + self.address = addr + self.settings = ConnectionSettings((self.address, con_params.database)) + self.record_class = record_class + + self.statement = None + self.return_extra = False + + self.last_query = None + + self.closing = False + self.is_reading = True + self.writing_allowed = asyncio.Event() + self.writing_allowed.set() + + self.timeout_handle = None + self.timeout_callback = self._on_timeout + self.completed_callback = self._on_waiter_completed + + self.queries_count = 0 + + self._is_ssl = False + + try: + self.create_future = loop.create_future + except AttributeError: + self.create_future = self._create_future_fallback + + def set_connection(self, connection): + self.conref = weakref.ref(connection) + + cdef get_connection(self): + if self.conref is not None: + return self.conref() + else: + return None + + def get_server_pid(self): + return self.backend_pid + + def get_settings(self): + return self.settings + + def get_record_class(self): + 
return self.record_class + + cdef inline resume_reading(self): + if not self.is_reading: + self.is_reading = True + self.transport.resume_reading() + + cdef inline pause_reading(self): + if self.is_reading: + self.is_reading = False + self.transport.pause_reading() + + @cython.iterable_coroutine + async def prepare(self, stmt_name, query, timeout, + *, + PreparedStatementState state=None, + ignore_custom_codec=False, + record_class): + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + timeout = self._get_timeout_impl(timeout) + + waiter = self._new_waiter(timeout) + try: + self._prepare(stmt_name, query) # network op + self.last_query = query + if state is None: + state = PreparedStatementState( + stmt_name, query, self, record_class, ignore_custom_codec) + self.statement = state + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + + @cython.iterable_coroutine + async def bind_execute(self, PreparedStatementState state, args, + str portal_name, int limit, return_extra, + timeout): + + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + timeout = self._get_timeout_impl(timeout) + args_buf = state._encode_bind_msg(args) + + waiter = self._new_waiter(timeout) + try: + self._bind_execute( + portal_name, + state.name, + args_buf, + limit) # network op + + self.last_query = state.query + self.statement = state + self.return_extra = return_extra + self.queries_count += 1 + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + + @cython.iterable_coroutine + async def bind_execute_many(self, PreparedStatementState state, args, + str portal_name, timeout): + + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + timeout = self._get_timeout_impl(timeout) + timer = Timer(timeout) + + # Make sure the argument sequence is encoded lazily with + # this generator expression to keep the memory pressure under + # control. 
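
# This lazy generator is what lets Connection.executemany() stream large
# argument sequences without encoding everything up front. Caller-side
# usage via the public API (table and values are illustrative):

import asyncpg

async def bulk_insert(con: asyncpg.Connection):
    # Each tuple is encoded into a Bind message only as the send
    # loop below pulls it from the generator.
    await con.executemany(
        'INSERT INTO mytable (a, b) VALUES ($1, $2)',
        [(1, 'x'), (2, 'y'), (3, 'z')],
    )
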
+ data_gen = (state._encode_bind_msg(b, i) for i, b in enumerate(args)) + arg_bufs = iter(data_gen) + + waiter = self._new_waiter(timeout) + try: + more = self._bind_execute_many( + portal_name, + state.name, + arg_bufs) # network op + + self.last_query = state.query + self.statement = state + self.return_extra = False + self.queries_count += 1 + + while more: + with timer: + await asyncio.wait_for( + self.writing_allowed.wait(), + timeout=timer.get_remaining_budget()) + # On Windows the above event somehow won't allow context + # switch, so forcing one with sleep(0) here + await asyncio.sleep(0) + if not timer.has_budget_greater_than(0): + raise asyncio.TimeoutError + more = self._bind_execute_many_more() # network op + + except asyncio.TimeoutError as e: + self._bind_execute_many_fail(e) # network op + + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + + @cython.iterable_coroutine + async def bind(self, PreparedStatementState state, args, + str portal_name, timeout): + + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + timeout = self._get_timeout_impl(timeout) + args_buf = state._encode_bind_msg(args) + + waiter = self._new_waiter(timeout) + try: + self._bind( + portal_name, + state.name, + args_buf) # network op + + self.last_query = state.query + self.statement = state + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + + @cython.iterable_coroutine + async def execute(self, PreparedStatementState state, + str portal_name, int limit, return_extra, + timeout): + + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + timeout = self._get_timeout_impl(timeout) + + waiter = self._new_waiter(timeout) + try: + self._execute( + portal_name, + limit) # network op + + self.last_query = state.query + self.statement = state + self.return_extra = return_extra + self.queries_count += 1 + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + + @cython.iterable_coroutine + async def query(self, query, timeout): + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + # query() needs to call _get_timeout instead of _get_timeout_impl + # for consistent validation, as it is called differently from + # prepare/bind/execute methods. + timeout = self._get_timeout(timeout) + + waiter = self._new_waiter(timeout) + try: + self._simple_query(query) # network op + self.last_query = query + self.queries_count += 1 + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + + @cython.iterable_coroutine + async def copy_out(self, copy_stmt, sink, timeout): + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + + timeout = self._get_timeout_impl(timeout) + timer = Timer(timeout) + + # The copy operation is guarded by a single timeout + # on the top level. 
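
# The caller-facing wrapper over this path is Connection.copy_from_query(),
# whose `output` sink receives each batch of CopyData payloads that the
# loop below pushes. A sketch of typical usage (query and sink are
# illustrative):

import asyncpg

async def dump_query(con: asyncpg.Connection) -> bytes:
    chunks = []

    async def sink(data: bytes):
        chunks.append(data)  # could equally write to a file or socket

    status = await con.copy_from_query(
        'SELECT * FROM mytable', output=sink)
    # status is the CommandComplete tag, e.g. 'COPY 42'
    return b''.join(chunks)
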
+ waiter = self._new_waiter(timer.get_remaining_budget()) + + self._copy_out(copy_stmt) + + try: + while True: + self.resume_reading() + + with timer: + buffer, done, status_msg = await waiter + + # buffer will be empty if CopyDone was received apart from + # the last CopyData message. + if buffer: + try: + with timer: + await asyncio.wait_for( + sink(buffer), + timeout=timer.get_remaining_budget()) + except (Exception, asyncio.CancelledError) as ex: + # Abort the COPY operation on any error in + # output sink. + self._request_cancel() + # Make asyncio shut up about unretrieved + # QueryCanceledError + waiter.add_done_callback(lambda f: f.exception()) + raise + + # done will be True upon receipt of CopyDone. + if done: + break + + waiter = self._new_waiter(timer.get_remaining_budget()) + + finally: + self.resume_reading() + + return status_msg + + @cython.iterable_coroutine + async def copy_in(self, copy_stmt, reader, data, + records, PreparedStatementState record_stmt, timeout): + cdef: + WriteBuffer wbuf + ssize_t num_cols + Codec codec + + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + + timeout = self._get_timeout_impl(timeout) + timer = Timer(timeout) + + waiter = self._new_waiter(timer.get_remaining_budget()) + + # Initiate COPY IN. + self._copy_in(copy_stmt) + + try: + if record_stmt is not None: + # copy_in_records in binary mode + wbuf = WriteBuffer.new() + # Signature + wbuf.write_bytes(_COPY_SIGNATURE) + # Flags field + wbuf.write_int32(0) + # No header extension + wbuf.write_int32(0) + + record_stmt._ensure_rows_decoder() + codecs = record_stmt.rows_codecs + num_cols = len(codecs) + settings = self.settings + + for codec in codecs: + if (not codec.has_encoder() or + codec.format != PG_FORMAT_BINARY): + raise apg_exc.InternalClientError( + 'no binary format encoder for ' + 'type {} (OID {})'.format(codec.name, codec.oid)) + + if isinstance(records, collections.abc.AsyncIterable): + async for row in records: + # Tuple header + wbuf.write_int16(num_cols) + # Tuple data + for i in range(num_cols): + item = row[i] + if item is None: + wbuf.write_int32(-1) + else: + codec = cpython.PyTuple_GET_ITEM( + codecs, i) + codec.encode(settings, wbuf, item) + + if wbuf.len() >= _COPY_BUFFER_SIZE: + with timer: + await self.writing_allowed.wait() + self._write_copy_data_msg(wbuf) + wbuf = WriteBuffer.new() + else: + for row in records: + # Tuple header + wbuf.write_int16(num_cols) + # Tuple data + for i in range(num_cols): + item = row[i] + if item is None: + wbuf.write_int32(-1) + else: + codec = cpython.PyTuple_GET_ITEM( + codecs, i) + codec.encode(settings, wbuf, item) + + if wbuf.len() >= _COPY_BUFFER_SIZE: + with timer: + await self.writing_allowed.wait() + self._write_copy_data_msg(wbuf) + wbuf = WriteBuffer.new() + + # End of binary copy. + wbuf.write_int16(-1) + self._write_copy_data_msg(wbuf) + + elif reader is not None: + try: + aiter = reader.__aiter__ + except AttributeError: + raise TypeError('reader is not an asynchronous iterable') + else: + iterator = aiter() + + try: + while True: + # We rely on protocol flow control to moderate the + # rate of data messages. 
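For context: the `copy_out()` and `copy_in()` coroutines here implement the wire side of PostgreSQL's COPY sub-protocol; the public entry points are the `Connection.copy_*` methods. A sketch, with file and table names as placeholders:

```
# COPY ... TO STDOUT drives copy_out(); the sink callback receives each buffer.
await conn.copy_from_table('users', output='users.csv', format='csv')

# COPY ... FROM STDIN with typed records drives copy_in() in binary mode
# (the record_stmt branch of the implementation).
await conn.copy_records_to_table('users', records=[(1, 'ada'), (2, 'bob')])
```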
with timer: + await self.writing_allowed.wait() + with timer: + chunk = await asyncio.wait_for( + iterator.__anext__(), + timeout=timer.get_remaining_budget()) + self._write_copy_data_msg(chunk) + except builtins.StopAsyncIteration: + pass + else: + # Buffer passed in directly. + await self.writing_allowed.wait() + self._write_copy_data_msg(data) + + except asyncio.TimeoutError: + self._write_copy_fail_msg('TimeoutError') + self._on_timeout(self.waiter) + try: + await waiter + except TimeoutError: + raise + else: + raise apg_exc.InternalClientError('TimeoutError was not raised') + + except (Exception, asyncio.CancelledError) as e: + self._write_copy_fail_msg(str(e)) + self._request_cancel() + # Make asyncio shut up about unretrieved QueryCanceledError + waiter.add_done_callback(lambda f: f.exception()) + raise + + self._write_copy_done_msg() + + status_msg = await waiter + + return status_msg + + @cython.iterable_coroutine + async def close_statement(self, PreparedStatementState state, timeout): + if self.cancel_waiter is not None: + await self.cancel_waiter + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + self._check_state() + + if state.refs != 0: + raise apg_exc.InternalClientError( + 'cannot close prepared statement; refs == {} != 0'.format( + state.refs)) + + timeout = self._get_timeout_impl(timeout) + waiter = self._new_waiter(timeout) + try: + self._close(state.name, False) # network op + state.closed = True + except Exception as ex: + waiter.set_exception(ex) + self._coreproto_error() + finally: + return await waiter + + def is_closed(self): + return self.closing + + def is_connected(self): + return not self.closing and self.con_status == CONNECTION_OK + + def abort(self): + if self.closing: + return + self.closing = True + self._handle_waiter_on_connection_lost(None) + self._terminate() + self.transport.abort() + + @cython.iterable_coroutine + async def close(self, timeout): + if self.closing: + return + + self.closing = True + + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + + if self.cancel_waiter is not None: + await self.cancel_waiter + + if self.waiter is not None: + # If there is a query running, cancel it + self._request_cancel() + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + if self.cancel_waiter is not None: + await self.cancel_waiter + + assert self.waiter is None + + timeout = self._get_timeout_impl(timeout) + + # Ask the server to terminate the connection and wait for it + # to drop. + self.waiter = self._new_waiter(timeout) + self._terminate() + try: + await self.waiter + except ConnectionResetError: + # There appears to be a difference in behaviour of asyncio + # on Windows, where, instead of calling protocol.connection_lost(), + # a ConnectionResetError will be thrown into the task. + pass + finally: + self.waiter = None + self.transport.abort() + + def _request_cancel(self): + self.cancel_waiter = self.create_future() + self.cancel_sent_waiter = self.create_future() + + con = self.get_connection() + # if 'con' is None it means that the connection object has been + # garbage collected and that the transport will soon be aborted. + if con is not None: + con._cancel_current_command(self.cancel_sent_waiter) + else: + self.loop.call_exception_handler({ + 'message': 'asyncpg.Protocol has no reference to its ' + 'Connection object and yet a cancellation ' + 'was requested. Please report this at ' + 'github.com/magicstack/asyncpg.'
+ }) + self.abort() + + if self.state == PROTOCOL_PREPARE: + # we need to send a SYNC to server if we cancel during the PREPARE phase + # because the PREPARE sequence does not send a SYNC itself. + # we cannot send this extra SYNC if we are not in PREPARE phase, + # because then we would issue two SYNCs and we would get two ReadyForQuery + # replies, which our current state machine implementation cannot handle + self._write(SYNC_MESSAGE) + self._set_state(PROTOCOL_CANCELLED) + + def _on_timeout(self, fut): + if self.waiter is not fut or fut.done() or \ + self.cancel_waiter is not None or \ + self.timeout_handle is None: + return + self._request_cancel() + self.waiter.set_exception(asyncio.TimeoutError()) + + def _on_waiter_completed(self, fut): + if self.timeout_handle: + self.timeout_handle.cancel() + self.timeout_handle = None + if fut is not self.waiter or self.cancel_waiter is not None: + return + if fut.cancelled(): + self._request_cancel() + + def _create_future_fallback(self): + return asyncio.Future(loop=self.loop) + + cdef _handle_waiter_on_connection_lost(self, cause): + if self.waiter is not None and not self.waiter.done(): + exc = apg_exc.ConnectionDoesNotExistError( + 'connection was closed in the middle of ' + 'operation') + if cause is not None: + exc.__cause__ = cause + self.waiter.set_exception(exc) + self.waiter = None + + cdef _set_server_parameter(self, name, val): + self.settings.add_setting(name, val) + + def _get_timeout(self, timeout): + if timeout is not None: + try: + if type(timeout) is bool: + raise ValueError + timeout = float(timeout) + except ValueError: + raise ValueError( + 'invalid timeout value: expected non-negative float ' + '(got {!r})'.format(timeout)) from None + + return self._get_timeout_impl(timeout) + + cdef inline _get_timeout_impl(self, timeout): + if timeout is None: + timeout = self.get_connection()._config.command_timeout + elif timeout is NO_TIMEOUT: + timeout = None + else: + timeout = float(timeout) + + if timeout is not None and timeout <= 0: + raise asyncio.TimeoutError() + return timeout + + cdef _check_state(self): + if self.cancel_waiter is not None: + raise apg_exc.InterfaceError( + 'cannot perform operation: another operation is cancelling') + if self.closing: + raise apg_exc.InterfaceError( + 'cannot perform operation: connection is closed') + if self.waiter is not None or self.timeout_handle is not None: + raise apg_exc.InterfaceError( + 'cannot perform operation: another operation is in progress') + + def _is_cancelling(self): + return ( + self.cancel_waiter is not None or + self.cancel_sent_waiter is not None + ) + + @cython.iterable_coroutine + async def _wait_for_cancellation(self): + if self.cancel_sent_waiter is not None: + await self.cancel_sent_waiter + self.cancel_sent_waiter = None + if self.cancel_waiter is not None: + await self.cancel_waiter + + cdef _coreproto_error(self): + try: + if self.waiter is not None: + if not self.waiter.done(): + raise apg_exc.InternalClientError( + 'waiter is not done while handling critical ' + 'protocol error') + self.waiter = None + finally: + self.abort() + + cdef _new_waiter(self, timeout): + if self.waiter is not None: + raise apg_exc.InterfaceError( + 'cannot perform operation: another operation is in progress') + self.waiter = self.create_future() + if timeout is not None: + self.timeout_handle = self.loop.call_later( + timeout, self.timeout_callback, self.waiter) + self.waiter.add_done_callback(self.completed_callback) + return self.waiter + + cdef _on_result__connect(self, 
object waiter): + waiter.set_result(True) + + cdef _on_result__prepare(self, object waiter): + if PG_DEBUG: + if self.statement is None: + raise apg_exc.InternalClientError( + '_on_result__prepare: statement is None') + + if self.result_param_desc is not None: + self.statement._set_args_desc(self.result_param_desc) + if self.result_row_desc is not None: + self.statement._set_row_desc(self.result_row_desc) + waiter.set_result(self.statement) + + cdef _on_result__bind_and_exec(self, object waiter): + if self.return_extra: + waiter.set_result(( + self.result, + self.result_status_msg, + self.result_execute_completed)) + else: + waiter.set_result(self.result) + + cdef _on_result__bind(self, object waiter): + waiter.set_result(self.result) + + cdef _on_result__close_stmt_or_portal(self, object waiter): + waiter.set_result(self.result) + + cdef _on_result__simple_query(self, object waiter): + waiter.set_result(self.result_status_msg.decode(self.encoding)) + + cdef _on_result__copy_out(self, object waiter): + cdef bint copy_done = self.state == PROTOCOL_COPY_OUT_DONE + if copy_done: + status_msg = self.result_status_msg.decode(self.encoding) + else: + status_msg = None + + # We need to put some backpressure on Postgres + # here in case the sink is slow to process the output. + self.pause_reading() + + waiter.set_result((self.result, copy_done, status_msg)) + + cdef _on_result__copy_in(self, object waiter): + status_msg = self.result_status_msg.decode(self.encoding) + waiter.set_result(status_msg) + + cdef _decode_row(self, const char* buf, ssize_t buf_len): + if PG_DEBUG: + if self.statement is None: + raise apg_exc.InternalClientError( + '_decode_row: statement is None') + + return self.statement._decode_row(buf, buf_len) + + cdef _dispatch_result(self): + waiter = self.waiter + self.waiter = None + + if PG_DEBUG: + if waiter is None: + raise apg_exc.InternalClientError('_on_result: waiter is None') + + if waiter.cancelled(): + return + + if waiter.done(): + raise apg_exc.InternalClientError('_on_result: waiter is done') + + if self.result_type == RESULT_FAILED: + if isinstance(self.result, dict): + exc = apg_exc_base.PostgresError.new( + self.result, query=self.last_query) + else: + exc = self.result + waiter.set_exception(exc) + return + + try: + if self.state == PROTOCOL_AUTH: + self._on_result__connect(waiter) + + elif self.state == PROTOCOL_PREPARE: + self._on_result__prepare(waiter) + + elif self.state == PROTOCOL_BIND_EXECUTE: + self._on_result__bind_and_exec(waiter) + + elif self.state == PROTOCOL_BIND_EXECUTE_MANY: + self._on_result__bind_and_exec(waiter) + + elif self.state == PROTOCOL_EXECUTE: + self._on_result__bind_and_exec(waiter) + + elif self.state == PROTOCOL_BIND: + self._on_result__bind(waiter) + + elif self.state == PROTOCOL_CLOSE_STMT_PORTAL: + self._on_result__close_stmt_or_portal(waiter) + + elif self.state == PROTOCOL_SIMPLE_QUERY: + self._on_result__simple_query(waiter) + + elif (self.state == PROTOCOL_COPY_OUT_DATA or + self.state == PROTOCOL_COPY_OUT_DONE): + self._on_result__copy_out(waiter) + + elif self.state == PROTOCOL_COPY_IN_DATA: + self._on_result__copy_in(waiter) + + elif self.state == PROTOCOL_TERMINATING: + # We are waiting for the connection to drop, so + # ignore any stray results at this point. + pass + + else: + raise apg_exc.InternalClientError( + 'got result for unknown protocol state {}'. 
+ format(self.state)) + + except Exception as exc: + waiter.set_exception(exc) + + cdef _on_result(self): + if self.timeout_handle is not None: + self.timeout_handle.cancel() + self.timeout_handle = None + + if self.cancel_waiter is not None: + # We have received the result of a cancelled command. + if not self.cancel_waiter.done(): + # The cancellation future might have been cancelled + # by the cancellation of the entire task running the query. + self.cancel_waiter.set_result(None) + self.cancel_waiter = None + if self.waiter is not None and self.waiter.done(): + self.waiter = None + if self.waiter is None: + return + + try: + self._dispatch_result() + finally: + self.statement = None + self.last_query = None + self.return_extra = False + + cdef _on_notice(self, parsed): + con = self.get_connection() + if con is not None: + con._process_log_message(parsed, self.last_query) + + cdef _on_notification(self, pid, channel, payload): + con = self.get_connection() + if con is not None: + con._process_notification(pid, channel, payload) + + cdef _on_connection_lost(self, exc): + if self.closing: + # The connection was lost because + # Protocol.close() was called + if self.waiter is not None and not self.waiter.done(): + if exc is None: + self.waiter.set_result(None) + else: + self.waiter.set_exception(exc) + self.waiter = None + else: + # The connection was lost because it was + # terminated or due to another error; + # Throw an error in any awaiting waiter. + self.closing = True + # Cleanup the connection resources, including, possibly, + # releasing the pool holder. + con = self.get_connection() + if con is not None: + con._cleanup() + self._handle_waiter_on_connection_lost(exc) + + cdef _write(self, buf): + self.transport.write(memoryview(buf)) + + cdef _writelines(self, list buffers): + self.transport.writelines(buffers) + + # asyncio callbacks: + + def data_received(self, data): + self.buffer.feed_data(data) + self._read_server_messages() + + def connection_made(self, transport): + self.transport = transport + + sock = transport.get_extra_info('socket') + if (sock is not None and + (not hasattr(socket, 'AF_UNIX') + or sock.family != socket.AF_UNIX)): + sock.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, 1) + + try: + self._connect() + except Exception as ex: + transport.abort() + self.con_status = CONNECTION_BAD + self._set_state(PROTOCOL_FAILED) + self._on_error(ex) + + def connection_lost(self, exc): + self.con_status = CONNECTION_BAD + self._set_state(PROTOCOL_FAILED) + self._on_connection_lost(exc) + + def pause_writing(self): + self.writing_allowed.clear() + + def resume_writing(self): + self.writing_allowed.set() + + @property + def is_ssl(self): + return self._is_ssl + + @is_ssl.setter + def is_ssl(self, value): + self._is_ssl = value + + +class Timer: + def __init__(self, budget): + self._budget = budget + self._started = 0 + + def __enter__(self): + if self._budget is not None: + self._started = time.monotonic() + + def __exit__(self, et, e, tb): + if self._budget is not None: + self._budget -= time.monotonic() - self._started + + def get_remaining_budget(self): + return self._budget + + def has_budget_greater_than(self, amount): + if self._budget is None: + # Unlimited budget. + return True + else: + return self._budget > amount + + +class Protocol(BaseProtocol, asyncio.Protocol): + pass + + +def _create_record(object mapping, tuple elems): + # Exposed only for testing purposes. 
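For context: the `Timer` class above implements the shared-deadline pattern used throughout this file: a single budget is spent across several awaits, and each `with timer:` block subtracts its own elapsed time. A standalone sketch of the pattern, using the `Timer` defined above (the step functions are hypothetical):

```
import asyncio

timer = Timer(5.0)  # one five-second budget shared by all steps

with timer:
    do_network_step()   # hypothetical; its runtime is deducted from the budget
with timer:
    do_decode_step()    # hypothetical

if not timer.has_budget_greater_than(0):
    raise asyncio.TimeoutError  # the combined steps exhausted the budget
remaining = timer.get_remaining_budget()  # pass what is left to the next wait
```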
+ + cdef: + object rec + int32_t i + + if mapping is None: + desc = record.ApgRecordDesc_New({}, ()) + else: + desc = record.ApgRecordDesc_New( + mapping, tuple(mapping) if mapping else ()) + + rec = record.ApgRecord_New(Record, desc, len(elems)) + for i in range(len(elems)): + elem = elems[i] + cpython.Py_INCREF(elem) + record.ApgRecord_SET_ITEM(rec, i, elem) + return rec + + +Record = record.ApgRecord_InitTypes() diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/scram.pxd b/sbsheriff/Lib/site-packages/asyncpg/protocol/scram.pxd new file mode 100644 index 0000000..5421429 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/scram.pxd @@ -0,0 +1,31 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef class SCRAMAuthentication: + cdef: + readonly bytes authentication_method + readonly bytes authorization_message + readonly bytes client_channel_binding + readonly bytes client_first_message_bare + readonly bytes client_nonce + readonly bytes client_proof + readonly bytes password_salt + readonly int password_iterations + readonly bytes server_first_message + # server_key is an instance of hmac.HMAC + readonly object server_key + readonly bytes server_nonce + + cdef create_client_first_message(self, str username) + cdef create_client_final_message(self, str password) + cdef parse_server_first_message(self, bytes server_response) + cdef verify_server_final_message(self, bytes server_final_message) + cdef _bytes_xor(self, bytes a, bytes b) + cdef _generate_client_nonce(self, int num_bytes) + cdef _generate_client_proof(self, str password) + cdef _generate_salted_password(self, str password, bytes salt, int iterations) + cdef _normalize_password(self, str original_password) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/scram.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/scram.pyx new file mode 100644 index 0000000..765ddd4 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/scram.pyx @@ -0,0 +1,341 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import base64 +import hashlib +import hmac +import re +import secrets +import stringprep +import unicodedata + + +@cython.final +cdef class SCRAMAuthentication: + """Contains the protocol for generating a SCRAM hashed password. + + Since PostgreSQL 10, the option to hash passwords using the SCRAM-SHA-256 + method was added. This module follows the defined protocol, which can be + referenced from here: + + https://www.postgresql.org/docs/current/sasl-authentication.html#SASL-SCRAM-SHA-256 + + libpq references the following RFCs that it uses for implementation: + + * RFC 5802 + * RFC 5803 + * RFC 7677 + + The protocol works as follows: + + - A client connects to the server. The server requests the client to begin + SASL authentication using SCRAM and presents the client with the methods it + supports. At present, those are SCRAM-SHA-256, and, on servers that are + built with OpenSSL and + are PG11+, SCRAM-SHA-256-PLUS (which supports channel binding, more on that + below) + + - The client sends a "first message" to the server, where it chooses which + method to authenticate with, and sends, along with the method, an indication + of channel binding (we disable for now), a nonce, and the username.
(Technically, PostgreSQL ignores the username as it already has it from the + initial connection, but we add it for completeness) + + - The server responds with a "first message" in which it extends the nonce, + as well as a password salt and the number of iterations to hash the password + with. The client validates that the new nonce contains the first part of the + client's original nonce + + - The client generates a salted password, but does not send this to the + server. Instead, the client follows the SCRAM algorithm (RFC5802) to + generate a proof. This proof is sent as part of a client "final message" to + the server for it to validate. + + - The server validates the proof. If it is valid, the server sends a + verification code for the client to verify that the server came to the same + proof the client did. PostgreSQL immediately sends an AuthenticationOK + response right after a valid negotiation. If the password the client + provided was invalid, then authentication fails. + + (The beauty of this is that the salted password is never transmitted over + the wire!) + + PostgreSQL 11 added support for channel binding (i.e. + SCRAM-SHA-256-PLUS) but due to some ongoing discussion, there is a conscious + decision by several driver authors to not support it as of yet. As such, the + channel binding parameter is hard-coded to "n" for now, but can be updated + to support other channel binding methods in the future + """ + AUTHENTICATION_METHODS = [b"SCRAM-SHA-256"] + DEFAULT_CLIENT_NONCE_BYTES = 24 + DIGEST = hashlib.sha256 + REQUIREMENTS_CLIENT_FINAL_MESSAGE = ['client_channel_binding', + 'server_nonce'] + REQUIREMENTS_CLIENT_PROOF = ['password_iterations', 'password_salt', + 'server_first_message', 'server_nonce'] + SASLPREP_PROHIBITED = ( + stringprep.in_table_a1, # PostgreSQL treats this as prohibited + stringprep.in_table_c12, + stringprep.in_table_c21_c22, + stringprep.in_table_c3, + stringprep.in_table_c4, + stringprep.in_table_c5, + stringprep.in_table_c6, + stringprep.in_table_c7, + stringprep.in_table_c8, + stringprep.in_table_c9, + ) + + def __cinit__(self, bytes authentication_method): + self.authentication_method = authentication_method + self.authorization_message = None + # channel binding is turned off for the time being + self.client_channel_binding = b"n,," + self.client_first_message_bare = None + self.client_nonce = None + self.client_proof = None + self.password_salt = None + # self.password_iterations = None + self.server_first_message = None + self.server_key = None + self.server_nonce = None + + cdef create_client_first_message(self, str username): + """Create the initial client message for SCRAM authentication""" + cdef: + bytes msg + bytes client_first_message + + self.client_nonce = \ + self._generate_client_nonce(self.DEFAULT_CLIENT_NONCE_BYTES) + # set the client first message bare here, as it's used in a later step + self.client_first_message_bare = b"n=" + username.encode("utf-8") + \ + b",r=" + self.client_nonce + # put together the full message here + msg = bytes() + msg += self.authentication_method + b"\0" + client_first_message = self.client_channel_binding + \ + self.client_first_message_bare + msg += (len(client_first_message)).to_bytes(4, byteorder='big') + \ + client_first_message + return msg + + cdef create_client_final_message(self, str password): + """Create the final client message as part of SCRAM authentication""" + cdef: + bytes msg + + if any([getattr(self, val) is None for val in + self.REQUIREMENTS_CLIENT_FINAL_MESSAGE]): + raise
Exception( + "you need values from server to generate a client proof") + + # normalize the password using the SASLprep algorithm in RFC 4013 + password = self._normalize_password(password) + + # generate the client proof + self.client_proof = self._generate_client_proof(password=password) + msg = bytes() + msg += b"c=" + base64.b64encode(self.client_channel_binding) + \ + b",r=" + self.server_nonce + \ + b",p=" + base64.b64encode(self.client_proof) + return msg + + cdef parse_server_first_message(self, bytes server_response): + """Parse the response from the first message from the server""" + self.server_first_message = server_response + try: + self.server_nonce = re.search(b'r=([^,]+),', + self.server_first_message).group(1) + except (AttributeError, IndexError): + raise Exception("could not get nonce") + if not self.server_nonce.startswith(self.client_nonce): + raise Exception("invalid nonce") + try: + self.password_salt = re.search(b's=([^,]+),', + self.server_first_message).group(1) + except (AttributeError, IndexError): + raise Exception("could not get salt") + try: + self.password_iterations = int(re.search(rb'i=(\d+),?', + self.server_first_message).group(1)) + except (AttributeError, IndexError, TypeError, ValueError): + raise Exception("could not get iterations") + + cdef verify_server_final_message(self, bytes server_final_message): + """Verify the final message from the server""" + cdef: + bytes server_signature + + try: + server_signature = re.search(b'v=([^,]+)', + server_final_message).group(1) + except (AttributeError, IndexError): + raise Exception("could not get server signature") + + verify_server_signature = hmac.new(self.server_key.digest(), + self.authorization_message, self.DIGEST) + # validate the server signature against the verifier + return server_signature == base64.b64encode( + verify_server_signature.digest()) + + cdef _bytes_xor(self, bytes a, bytes b): + """XOR two bytestrings together""" + return bytes(a_i ^ b_i for a_i, b_i in zip(a, b)) + + cdef _generate_client_nonce(self, int num_bytes): + cdef: + bytes token + + token = secrets.token_bytes(num_bytes) + + return base64.b64encode(token) + + cdef _generate_client_proof(self, str password): + """Generate the client proof; requires the values from the server's first message""" + cdef: + bytes salted_password + + if any([getattr(self, val) is None for val in + self.REQUIREMENTS_CLIENT_PROOF]): + raise Exception( + "you need values from server to generate a client proof") + # generate a salted password + salted_password = self._generate_salted_password(password, + self.password_salt, self.password_iterations) + # client key is derived from the salted password + client_key = hmac.new(salted_password, b"Client Key", self.DIGEST) + # this allows us to compute the stored key that is residing on the server + stored_key = self.DIGEST(client_key.digest()) + # as well as compute the server key + self.server_key = hmac.new(salted_password, b"Server Key", self.DIGEST) + # build the authorization message that will be used in the + # client signature + # the "c=" portion is for the channel binding, but this is not + # presently implemented + self.authorization_message = self.client_first_message_bare + b"," + \ + self.server_first_message + b",c=" + \ + base64.b64encode(self.client_channel_binding) + \ + b",r=" + self.server_nonce + # sign!
+ client_signature = hmac.new(stored_key.digest(), + self.authorization_message, self.DIGEST) + # and the proof + return self._bytes_xor(client_key.digest(), client_signature.digest()) + + cdef _generate_salted_password(self, str password, bytes salt, int iterations): + """This follows the "Hi" algorithm specified in RFC5802 (equivalent to PBKDF2 with HMAC-SHA-256)""" + cdef: + bytes p + bytes s + bytes u + + # convert the password to a binary string - UTF8 is safe for SASL + # (though there are SASLPrep rules) + p = password.encode("utf8") + # the salt needs to be base64 decoded -- full binary must be used + s = base64.b64decode(salt) + # the initial signature is the HMAC of the salt concatenated with a + # 32-bit big-endian integer 1 + ui = hmac.new(p, s + b'\x00\x00\x00\x01', self.DIGEST) + # grab the initial digest + u = ui.digest() + # for X number of iterations, recompute the HMAC signature against the + # password and the latest iteration of the hash, and XOR it with the + # previous version + for x in range(iterations - 1): + ui = hmac.new(p, ui.digest(), hashlib.sha256) + # this is a fancy way of XORing two byte strings together + u = self._bytes_xor(u, ui.digest()) + return u + + cdef _normalize_password(self, str original_password): + """Normalize the password using SASLprep from RFC4013""" + cdef: + str normalized_password + + # Note: Per the PostgreSQL documentation, PostgreSQL does not require + # UTF-8 to be used for the password, but will perform SASLprep on the + # password regardless. + # If the password is not valid UTF-8, PostgreSQL will then **not** use + # SASLprep processing. + # If the password fails SASLprep, the password should still be sent + # See: https://www.postgresql.org/docs/current/sasl-authentication.html + # and + # https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/common/saslprep.c + # using the `pg_saslprep` function + normalized_password = original_password + # if the original password is an ASCII string or fails to encode as a + # UTF-8 string, then no further action is needed + try: + original_password.encode("ascii") + except UnicodeEncodeError: + pass + else: + return original_password + + # Step 1 of SASLPrep: Map. Per the algorithm, we map non-ascii space + # characters to ASCII spaces (\x20 or \u0020, but we will use ' ') and + # commonly mapped to nothing characters are removed + # Table C.1.2 -- non-ASCII spaces + # Table B.1 -- "Commonly mapped to nothing" + normalized_password = u"".join( + ' ' if stringprep.in_table_c12(c) else c + for c in tuple(normalized_password) if not stringprep.in_table_b1(c) + ) + + # If at this point the password is empty, PostgreSQL uses the original + # password + if not normalized_password: + return original_password + + # Step 2 of SASLPrep: Normalize. Normalize the password using the + # Unicode normalization algorithm to NFKC form + normalized_password = unicodedata.normalize('NFKC', normalized_password) + + # If the password is empty after normalization, PostgreSQL uses the + # original password + if not normalized_password: + return original_password + + normalized_password_tuple = tuple(normalized_password) + + # Step 3 of SASLPrep: Prohibited characters.
If PostgreSQL detects any + # of the prohibited characters in SASLPrep, it will use the original + # password + # We also include "unassigned code points" in the prohibited character + # category as PostgreSQL does the same + for c in normalized_password_tuple: + if any( + in_prohibited_table(c) + for in_prohibited_table in self.SASLPREP_PROHIBITED + ): + return original_password + + # Step 4 of SASLPrep: Bi-directional characters. PostgreSQL follows the + # rules for bi-directional characters laid out in RFC3454 Sec. 6 which + # are: + # 1. Characters in RFC 3454 Sec 5.8 are prohibited (C.8) + # 2. If a string contains a RandALCat character, it cannot contain any + # LCat character + # 3. If the string contains any RandALCat character, a RandALCat + # character must be the first and last character of the string + # RandALCat characters are found in table D.1, whereas LCat are in D.2 + if any(stringprep.in_table_d1(c) for c in normalized_password_tuple): + # if the first character or the last character is not in D.1, + # return the original password + if not (stringprep.in_table_d1(normalized_password_tuple[0]) and + stringprep.in_table_d1(normalized_password_tuple[-1])): + return original_password + + # if any characters are in D.2, use the original password + if any( + stringprep.in_table_d2(c) for c in normalized_password_tuple + ): + return original_password + + # return the normalized password + return normalized_password diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/settings.pxd b/sbsheriff/Lib/site-packages/asyncpg/protocol/settings.pxd new file mode 100644 index 0000000..41131cd --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/settings.pxd @@ -0,0 +1,30 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +cdef class ConnectionSettings(pgproto.CodecContext): + cdef: + str _encoding + object _codec + dict _settings + bint _is_utf8 + DataCodecConfig _data_codecs + + cdef add_setting(self, str name, str val) + cdef is_encoding_utf8(self) + cpdef get_text_codec(self) + cpdef inline register_data_types(self, types) + cpdef inline add_python_codec( + self, typeoid, typename, typeschema, typekind, encoder, + decoder, format) + cpdef inline remove_python_codec( + self, typeoid, typename, typeschema) + cpdef inline clear_type_cache(self) + cpdef inline set_builtin_type_codec( + self, typeoid, typename, typeschema, typekind, alias_to, format) + cpdef inline Codec get_data_codec( + self, uint32_t oid, ServerDataFormat format=*, + bint ignore_custom_codec=*) diff --git a/sbsheriff/Lib/site-packages/asyncpg/protocol/settings.pyx b/sbsheriff/Lib/site-packages/asyncpg/protocol/settings.pyx new file mode 100644 index 0000000..b4cfa39 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/protocol/settings.pyx @@ -0,0 +1,104 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +from asyncpg import exceptions + + +@cython.final +cdef class ConnectionSettings(pgproto.CodecContext): + + def __cinit__(self, conn_key): + self._encoding = 'utf-8' + self._is_utf8 = True + self._settings = {} + self._codec = codecs.lookup('utf-8') + self._data_codecs = DataCodecConfig(conn_key) + + cdef add_setting(self, str name, str val): + self._settings[name] = val + if name ==
'client_encoding': + py_enc = get_python_encoding(val) + self._codec = codecs.lookup(py_enc) + self._encoding = self._codec.name + self._is_utf8 = self._encoding == 'utf-8' + + cdef is_encoding_utf8(self): + return self._is_utf8 + + cpdef get_text_codec(self): + return self._codec + + cpdef inline register_data_types(self, types): + self._data_codecs.add_types(types) + + cpdef inline add_python_codec(self, typeoid, typename, typeschema, + typekind, encoder, decoder, format): + cdef: + ServerDataFormat _format + ClientExchangeFormat xformat + + if format == 'binary': + _format = PG_FORMAT_BINARY + xformat = PG_XFORMAT_OBJECT + elif format == 'text': + _format = PG_FORMAT_TEXT + xformat = PG_XFORMAT_OBJECT + elif format == 'tuple': + _format = PG_FORMAT_ANY + xformat = PG_XFORMAT_TUPLE + else: + raise exceptions.InterfaceError( + 'invalid `format` argument, expected {}, got {!r}'.format( + "'text', 'binary' or 'tuple'", format + )) + + self._data_codecs.add_python_codec(typeoid, typename, typeschema, + typekind, encoder, decoder, + _format, xformat) + + cpdef inline remove_python_codec(self, typeoid, typename, typeschema): + self._data_codecs.remove_python_codec(typeoid, typename, typeschema) + + cpdef inline clear_type_cache(self): + self._data_codecs.clear_type_cache() + + cpdef inline set_builtin_type_codec(self, typeoid, typename, typeschema, + typekind, alias_to, format): + cdef: + ServerDataFormat _format + + if format is None: + _format = PG_FORMAT_ANY + elif format == 'binary': + _format = PG_FORMAT_BINARY + elif format == 'text': + _format = PG_FORMAT_TEXT + else: + raise exceptions.InterfaceError( + 'invalid `format` argument, expected {}, got {!r}'.format( + "'text' or 'binary'", format + )) + + self._data_codecs.set_builtin_type_codec(typeoid, typename, typeschema, + typekind, alias_to, _format) + + cpdef inline Codec get_data_codec(self, uint32_t oid, + ServerDataFormat format=PG_FORMAT_ANY, + bint ignore_custom_codec=False): + return self._data_codecs.get_codec(oid, format, ignore_custom_codec) + + def __getattr__(self, name): + if not name.startswith('_'): + try: + return self._settings[name] + except KeyError: + raise AttributeError(name) from None + + return object.__getattribute__(self, name) + + def __repr__(self): + return '<ConnectionSettings {!r}>'.format(self._settings) diff --git a/sbsheriff/Lib/site-packages/asyncpg/serverversion.py b/sbsheriff/Lib/site-packages/asyncpg/serverversion.py new file mode 100644 index 0000000..31568a2 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/serverversion.py @@ -0,0 +1,60 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import re + +from .types import ServerVersion + +version_regex = re.compile( + r"(Postgre[^\s]*)?\s*" + r"(?P<major>[0-9]+)\.?" + r"((?P<minor>[0-9]+)\.?)?" + r"(?P<micro>[0-9]+)?" + r"(?P<releaselevel>[a-z]+)?" + r"(?P<serial>[0-9]+)?"
+) + + +def split_server_version_string(version_string): + version_match = version_regex.search(version_string) + + if version_match is None: + raise ValueError( + "Unable to parse Postgres " + f'version from "{version_string}"' + ) + + version = version_match.groupdict() + for ver_key, ver_value in version.items(): + # Cast all possible version parts to int + try: + version[ver_key] = int(ver_value) + except (TypeError, ValueError): + pass + + if version.get("major") < 10: + return ServerVersion( + version.get("major"), + version.get("minor") or 0, + version.get("micro") or 0, + version.get("releaselevel") or "final", + version.get("serial") or 0, + ) + + # Since PostgreSQL 10 the versioning scheme has changed. + # 10.x really means 10.0.x. While parsing 10.1 + # as (10, 1) may seem less confusing, in practice most + # version checks are written as version[:2], and we + # want to keep that behaviour consistent, i.e. not fail + # a major version check due to a bugfix release. + return ServerVersion( + version.get("major"), + 0, + version.get("minor") or 0, + version.get("releaselevel") or "final", + version.get("serial") or 0, + ) diff --git a/sbsheriff/Lib/site-packages/asyncpg/transaction.py b/sbsheriff/Lib/site-packages/asyncpg/transaction.py new file mode 100644 index 0000000..2d7ba49 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/transaction.py @@ -0,0 +1,238 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import enum + +from . import connresource +from . import exceptions as apg_errors + + +class TransactionState(enum.Enum): + NEW = 0 + STARTED = 1 + COMMITTED = 2 + ROLLEDBACK = 3 + FAILED = 4 + + +ISOLATION_LEVELS = {'read_committed', 'serializable', 'repeatable_read'} +ISOLATION_LEVELS_BY_VALUE = { + 'read committed': 'read_committed', + 'serializable': 'serializable', + 'repeatable read': 'repeatable_read', +} + + +class Transaction(connresource.ConnectionResource): + """Represents a transaction or savepoint block. + + Transactions are created by calling the + :meth:`Connection.transaction() <asyncpg.connection.Connection.transaction>` + function. + """ + + __slots__ = ('_connection', '_isolation', '_readonly', '_deferrable', + '_state', '_nested', '_id', '_managed') + + def __init__(self, connection, isolation, readonly, deferrable): + super().__init__(connection) + + if isolation and isolation not in ISOLATION_LEVELS: + raise ValueError( + 'isolation is expected to be either of {}, ' + 'got {!r}'.format(ISOLATION_LEVELS, isolation)) + + self._isolation = isolation + self._readonly = readonly + self._deferrable = deferrable + self._state = TransactionState.NEW + self._nested = False + self._id = None + self._managed = False + + async def __aenter__(self): + if self._managed: + raise apg_errors.InterfaceError( + 'cannot enter context: already in an `async with` block') + self._managed = True + await self.start() + + async def __aexit__(self, extype, ex, tb): + try: + self._check_conn_validity('__aexit__') + except apg_errors.InterfaceError: + if extype is GeneratorExit: + # When a PoolAcquireContext is being exited, and there + # is an open transaction in an async generator that has + # not been iterated fully, there is a possibility that + # Pool.release() would race with this __aexit__(), since + # both would be in concurrent tasks. In such a case we + # yield to Pool.release() to do the ROLLBACK for us.
+ # See https://github.com/MagicStack/asyncpg/issues/232 + # for an example. + return + else: + raise + + try: + if extype is not None: + await self.__rollback() + else: + await self.__commit() + finally: + self._managed = False + + @connresource.guarded + async def start(self): + """Enter the transaction or savepoint block.""" + self.__check_state_base('start') + if self._state is TransactionState.STARTED: + raise apg_errors.InterfaceError( + 'cannot start; the transaction is already started') + + con = self._connection + + if con._top_xact is None: + if con._protocol.is_in_transaction(): + raise apg_errors.InterfaceError( + 'cannot use Connection.transaction() in ' + 'a manually started transaction') + con._top_xact = self + else: + # Nested transaction block + if self._isolation: + top_xact_isolation = con._top_xact._isolation + if top_xact_isolation is None: + top_xact_isolation = ISOLATION_LEVELS_BY_VALUE[ + await self._connection.fetchval( + 'SHOW transaction_isolation;')] + if self._isolation != top_xact_isolation: + raise apg_errors.InterfaceError( + 'nested transaction has a different isolation level: ' + 'current {!r} != outer {!r}'.format( + self._isolation, top_xact_isolation)) + self._nested = True + + if self._nested: + self._id = con._get_unique_id('savepoint') + query = 'SAVEPOINT {};'.format(self._id) + else: + query = 'BEGIN' + if self._isolation == 'read_committed': + query += ' ISOLATION LEVEL READ COMMITTED' + elif self._isolation == 'repeatable_read': + query += ' ISOLATION LEVEL REPEATABLE READ' + elif self._isolation == 'serializable': + query += ' ISOLATION LEVEL SERIALIZABLE' + if self._readonly: + query += ' READ ONLY' + if self._deferrable: + query += ' DEFERRABLE' + query += ';' + + try: + await self._connection.execute(query) + except BaseException: + self._state = TransactionState.FAILED + raise + else: + self._state = TransactionState.STARTED + + def __check_state_base(self, opname): + if self._state is TransactionState.COMMITTED: + raise apg_errors.InterfaceError( + 'cannot {}; the transaction is already committed'.format( + opname)) + if self._state is TransactionState.ROLLEDBACK: + raise apg_errors.InterfaceError( + 'cannot {}; the transaction is already rolled back'.format( + opname)) + if self._state is TransactionState.FAILED: + raise apg_errors.InterfaceError( + 'cannot {}; the transaction is in error state'.format( + opname)) + + def __check_state(self, opname): + if self._state is not TransactionState.STARTED: + if self._state is TransactionState.NEW: + raise apg_errors.InterfaceError( + 'cannot {}; the transaction is not yet started'.format( + opname)) + self.__check_state_base(opname) + + async def __commit(self): + self.__check_state('commit') + + if self._connection._top_xact is self: + self._connection._top_xact = None + + if self._nested: + query = 'RELEASE SAVEPOINT {};'.format(self._id) + else: + query = 'COMMIT;' + + try: + await self._connection.execute(query) + except BaseException: + self._state = TransactionState.FAILED + raise + else: + self._state = TransactionState.COMMITTED + + async def __rollback(self): + self.__check_state('rollback') + + if self._connection._top_xact is self: + self._connection._top_xact = None + + if self._nested: + query = 'ROLLBACK TO {};'.format(self._id) + else: + query = 'ROLLBACK;' + + try: + await self._connection.execute(query) + except BaseException: + self._state = TransactionState.FAILED + raise + else: + self._state = TransactionState.ROLLEDBACK + + @connresource.guarded + async def commit(self): + 
"""Exit the transaction or savepoint block and commit changes.""" + if self._managed: + raise apg_errors.InterfaceError( + 'cannot manually commit from within an `async with` block') + await self.__commit() + + @connresource.guarded + async def rollback(self): + """Exit the transaction or savepoint block and rollback changes.""" + if self._managed: + raise apg_errors.InterfaceError( + 'cannot manually rollback from within an `async with` block') + await self.__rollback() + + def __repr__(self): + attrs = [] + attrs.append('state:{}'.format(self._state.name.lower())) + + if self._isolation is not None: + attrs.append(self._isolation) + if self._readonly: + attrs.append('readonly') + if self._deferrable: + attrs.append('deferrable') + + if self.__class__.__module__.startswith('asyncpg.'): + mod = 'asyncpg' + else: + mod = self.__class__.__module__ + + return '<{}.{} {} {:#x}>'.format( + mod, self.__class__.__name__, ' '.join(attrs), id(self)) diff --git a/sbsheriff/Lib/site-packages/asyncpg/types.py b/sbsheriff/Lib/site-packages/asyncpg/types.py new file mode 100644 index 0000000..bd5813f --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/types.py @@ -0,0 +1,177 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import collections + +from asyncpg.pgproto.types import ( + BitString, Point, Path, Polygon, + Box, Line, LineSegment, Circle, +) + + +__all__ = ( + 'Type', 'Attribute', 'Range', 'BitString', 'Point', 'Path', 'Polygon', + 'Box', 'Line', 'LineSegment', 'Circle', 'ServerVersion', +) + + +Type = collections.namedtuple('Type', ['oid', 'name', 'kind', 'schema']) +Type.__doc__ = 'Database data type.' +Type.oid.__doc__ = 'OID of the type.' +Type.name.__doc__ = 'Type name. For example "int2".' +Type.kind.__doc__ = \ + 'Type kind. Can be "scalar", "array", "composite" or "range".' +Type.schema.__doc__ = 'Name of the database schema that defines the type.' + + +Attribute = collections.namedtuple('Attribute', ['name', 'type']) +Attribute.__doc__ = 'Database relation attribute.' +Attribute.name.__doc__ = 'Attribute name.' +Attribute.type.__doc__ = 'Attribute data type :class:`asyncpg.types.Type`.' + + +ServerVersion = collections.namedtuple( + 'ServerVersion', ['major', 'minor', 'micro', 'releaselevel', 'serial']) +ServerVersion.__doc__ = 'PostgreSQL server version tuple.' 
+ + +class Range: + """Immutable representation of PostgreSQL `range` type.""" + + __slots__ = '_lower', '_upper', '_lower_inc', '_upper_inc', '_empty' + + def __init__(self, lower=None, upper=None, *, + lower_inc=True, upper_inc=False, + empty=False): + self._empty = empty + if empty: + self._lower = self._upper = None + self._lower_inc = self._upper_inc = False + else: + self._lower = lower + self._upper = upper + self._lower_inc = lower is not None and lower_inc + self._upper_inc = upper is not None and upper_inc + + @property + def lower(self): + return self._lower + + @property + def lower_inc(self): + return self._lower_inc + + @property + def lower_inf(self): + return self._lower is None and not self._empty + + @property + def upper(self): + return self._upper + + @property + def upper_inc(self): + return self._upper_inc + + @property + def upper_inf(self): + return self._upper is None and not self._empty + + @property + def isempty(self): + return self._empty + + def _issubset_lower(self, other): + if other._lower is None: + return True + if self._lower is None: + return False + + return self._lower > other._lower or ( + self._lower == other._lower + and (other._lower_inc or not self._lower_inc) + ) + + def _issubset_upper(self, other): + if other._upper is None: + return True + if self._upper is None: + return False + + return self._upper < other._upper or ( + self._upper == other._upper + and (other._upper_inc or not self._upper_inc) + ) + + def issubset(self, other): + if self._empty: + return True + if other._empty: + return False + + return self._issubset_lower(other) and self._issubset_upper(other) + + def issuperset(self, other): + return other.issubset(self) + + def __bool__(self): + return not self._empty + + def __eq__(self, other): + if not isinstance(other, Range): + return NotImplemented + + return ( + self._lower, + self._upper, + self._lower_inc, + self._upper_inc, + self._empty + ) == ( + other._lower, + other._upper, + other._lower_inc, + other._upper_inc, + other._empty + ) + + def __hash__(self): + return hash(( + self._lower, + self._upper, + self._lower_inc, + self._upper_inc, + self._empty + )) + + def __repr__(self): + if self._empty: + desc = 'empty' + else: + if self._lower is None or not self._lower_inc: + lb = '(' + else: + lb = '[' + + if self._lower is not None: + lb += repr(self._lower) + + if self._upper is not None: + ub = repr(self._upper) + else: + ub = '' + + if self._upper is None or not self._upper_inc: + ub += ')' + else: + ub += ']' + + desc = '{}, {}'.format(lb, ub) + + return '<Range {}>'.format(desc) + + __str__ = __repr__ diff --git a/sbsheriff/Lib/site-packages/asyncpg/utils.py b/sbsheriff/Lib/site-packages/asyncpg/utils.py new file mode 100644 index 0000000..3940e04 --- /dev/null +++ b/sbsheriff/Lib/site-packages/asyncpg/utils.py @@ -0,0 +1,45 @@ +# Copyright (C) 2016-present the asyncpg authors and contributors +# +# +# This module is part of asyncpg and is released under +# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 + + +import re + + +def _quote_ident(ident): + return '"{}"'.format(ident.replace('"', '""')) + + +def _quote_literal(string): + return "'{}'".format(string.replace("'", "''")) + + +async def _mogrify(conn, query, args): + """Safely inline arguments to query text.""" + # Introspect the target query for argument types and + # build a list of safely-quoted fully-qualified type names.
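For context: a few concrete values for the `Range` semantics defined above (default bounds are lower-inclusive and upper-exclusive):

```
from asyncpg.types import Range

a = Range(2, 9)    # renders as <Range [2, 9)>
b = Range(1, 10)
assert a.issubset(b) and b.issuperset(a)
assert not Range(empty=True)     # empty ranges are falsy via __bool__
assert Range(upper=5).lower_inf  # a missing lower bound is unbounded below
```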
+ ps = await conn.prepare(query) + paramtypes = [] + for t in ps.get_parameters(): + if t.name.endswith('[]'): + pname = '_' + t.name[:-2] + else: + pname = t.name + + paramtypes.append('{}.{}'.format( + _quote_ident(t.schema), _quote_ident(pname))) + del ps + + # Use Postgres to convert arguments to text representation + # by casting each value to text. + cols = ['quote_literal(${}::{}::text)'.format(i, t) + for i, t in enumerate(paramtypes, start=1)] + + textified = await conn.fetchrow( + 'SELECT {cols}'.format(cols=', '.join(cols)), *args) + + # Finally, replace $n references with text values. + return re.sub( + r'\$(\d+)\b', lambda m: textified[int(m.group(1)) - 1], query) diff --git a/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/INSTALLER b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/METADATA b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/METADATA new file mode 100644 index 0000000..68aaade --- /dev/null +++ b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/METADATA @@ -0,0 +1,119 @@ +Metadata-Version: 2.1 +Name: beautifulsoup4 +Version: 4.11.1 +Summary: Screen-scraping library +Home-page: https://www.crummy.com/software/BeautifulSoup/bs4/ +Author: Leonard Richardson +Author-email: leonardr@segfault.org +License: MIT +Download-URL: https://www.crummy.com/software/BeautifulSoup/bs4/download/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Topic :: Text Processing :: Markup :: XML +Classifier: Topic :: Text Processing :: Markup :: SGML +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6.0 +Description-Content-Type: text/markdown +Provides-Extra: lxml +Provides-Extra: html5lib +Requires-Dist: soupsieve (>1.2) +Provides-Extra: html5lib +Requires-Dist: html5lib; extra == 'html5lib' +Provides-Extra: lxml +Requires-Dist: lxml; extra == 'lxml' + +Beautiful Soup is a library that makes it easy to scrape information +from web pages. It sits atop an HTML or XML parser, providing Pythonic +idioms for iterating, searching, and modifying the parse tree. + +# Quick start + +``` +>>> from bs4 import BeautifulSoup +>>> soup = BeautifulSoup("
<p>Some<b>bad<i>HTML") +>>> print(soup.prettify()) +<html> + <body> + <p> + Some + <b> + bad + <i> + HTML + </i> + </b> + </p> + </body> +</html> +>>> soup.find(text="bad") +'bad' +>>> soup.i +<i>HTML</i> +# +>>> soup = BeautifulSoup("<tag1>Some<tag2/>bad<tag3>XML", "xml") +# +>>> print(soup.prettify()) +<?xml version="1.0" encoding="utf-8"?> +<tag1> + Some + <tag2/> + bad + <tag3> + XML + </tag3> +</tag1> +``` + +To go beyond the basics, [comprehensive documentation is available](https://www.crummy.com/software/BeautifulSoup/bs4/doc/). + +# Links + +* [Homepage](https://www.crummy.com/software/BeautifulSoup/bs4/) +* [Documentation](https://www.crummy.com/software/BeautifulSoup/bs4/doc/) +* [Discussion group](https://groups.google.com/group/beautifulsoup/) +* [Development](https://code.launchpad.net/beautifulsoup/) +* [Bug tracker](https://bugs.launchpad.net/beautifulsoup/) +* [Complete changelog](https://bazaar.launchpad.net/~leonardr/beautifulsoup/bs4/view/head:/CHANGELOG) + +# Note on Python 2 sunsetting + +Beautiful Soup's support for Python 2 was discontinued on December 31, +2020: one year after the sunset date for Python 2 itself. From this +point onward, new Beautiful Soup development will exclusively target +Python 3. The final release of Beautiful Soup 4 to support Python 2 +was 4.9.3. + +# Supporting the project + +If you use Beautiful Soup as part of your professional work, please consider a +[Tidelift subscription](https://tidelift.com/subscription/pkg/pypi-beautifulsoup4?utm_source=pypi-beautifulsoup4&utm_medium=referral&utm_campaign=readme). +This will support many of the free software projects your organization +depends on, not just Beautiful Soup. + +If you use Beautiful Soup for personal projects, the best way to say +thank you is to read +[Tool Safety](https://www.crummy.com/software/BeautifulSoup/zine/), a zine I +wrote about what Beautiful Soup has taught me about software +development. + +# Building the documentation + +The bs4/doc/ directory contains full documentation in Sphinx +format. Run `make html` in that directory to create HTML +documentation.
+ +# Running the unit tests + +Beautiful Soup supports unit test discovery using Pytest: + +``` +$ pytest +``` + + + diff --git a/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/RECORD b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/RECORD new file mode 100644 index 0000000..5eb8d34 --- /dev/null +++ b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/RECORD @@ -0,0 +1,53 @@ +beautifulsoup4-4.11.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +beautifulsoup4-4.11.1.dist-info/METADATA,sha256=zefuevXradYVap7gKfvpSwLzvs8FSBONKfY8j5HIj-k,3525 +beautifulsoup4-4.11.1.dist-info/RECORD,, +beautifulsoup4-4.11.1.dist-info/WHEEL,sha256=NzFAKnL7g-U64xnS1s5e3mJnxKpOTeOtlXdFwS9yNXI,92 +beautifulsoup4-4.11.1.dist-info/top_level.txt,sha256=gpUVJcTwW3q7-QGp6tAEomZsskknmgSqVe6xn1C0jJI,26 +bs4/__init__.py,sha256=v5VuQqegAzN3bo4kjs6fT0391zvKQGhvBlV841aJ86A,32832 +bs4/__pycache__/__init__.cpython-310.pyc,, +bs4/__pycache__/dammit.cpython-310.pyc,, +bs4/__pycache__/diagnose.cpython-310.pyc,, +bs4/__pycache__/element.cpython-310.pyc,, +bs4/__pycache__/formatter.cpython-310.pyc,, +bs4/builder/__init__.py,sha256=Ny7NmZu3XDndp_elCrz2W8K9OUjrAjHQuYianJR6RDM,24378 +bs4/builder/__pycache__/__init__.cpython-310.pyc,, +bs4/builder/__pycache__/_html5lib.cpython-310.pyc,, +bs4/builder/__pycache__/_htmlparser.cpython-310.pyc,, +bs4/builder/__pycache__/_lxml.cpython-310.pyc,, +bs4/builder/_html5lib.py,sha256=YFOKZf3E22AEp_V2k49U5LmF26bpSZixZrZGSwM_iCU,18946 +bs4/builder/_htmlparser.py,sha256=6RYsAXWb_ppMZGB-7lpEmJQ73tpzSu_CJ5QPaFcJu-Y,19169 +bs4/builder/_lxml.py,sha256=ik6BFGnxAzV2-21S_Wc-7ZeA174muSA_ZhmpnAe3g0E,14904 +bs4/dammit.py,sha256=G0cQfsEqfwJ-FIQMkXgCJwSHMn7t9vPepCrud6fZEKk,41158 +bs4/diagnose.py,sha256=MRbN2bJSpa8VFt8HemqP8BK9hL5ronCxZmrfGRZYwBg,7911 +bs4/element.py,sha256=jjP-cIA3oWJrAhky61wayot92SQFKzJMEE8My6uTPDM,86753 +bs4/formatter.py,sha256=f5UBtvW9twrrQeLjBeLaKp4ntZpUDmUBY8jF3BiLraM,7206 +bs4/tests/__init__.py,sha256=yFvfhDv5vOeho6NEZcI0AIpmuEe9bvC7TsqNp3hDGxQ,49185 +bs4/tests/__pycache__/__init__.cpython-310.pyc,, +bs4/tests/__pycache__/test_builder.cpython-310.pyc,, +bs4/tests/__pycache__/test_builder_registry.cpython-310.pyc,, +bs4/tests/__pycache__/test_dammit.cpython-310.pyc,, +bs4/tests/__pycache__/test_docs.cpython-310.pyc,, +bs4/tests/__pycache__/test_element.cpython-310.pyc,, +bs4/tests/__pycache__/test_formatter.cpython-310.pyc,, +bs4/tests/__pycache__/test_html5lib.cpython-310.pyc,, +bs4/tests/__pycache__/test_htmlparser.cpython-310.pyc,, +bs4/tests/__pycache__/test_lxml.cpython-310.pyc,, +bs4/tests/__pycache__/test_navigablestring.cpython-310.pyc,, +bs4/tests/__pycache__/test_pageelement.cpython-310.pyc,, +bs4/tests/__pycache__/test_soup.cpython-310.pyc,, +bs4/tests/__pycache__/test_tag.cpython-310.pyc,, +bs4/tests/__pycache__/test_tree.cpython-310.pyc,, +bs4/tests/test_builder.py,sha256=nc2JE5EMrEf-p24qhf2R8qAV5PpFiOuNpYCmtmCjlTI,1115 +bs4/tests/test_builder_registry.py,sha256=_Vh2CyYzv4BKoJdCa7s4lsBLUQptskfgiSn3U3810CQ,5068 +bs4/tests/test_dammit.py,sha256=lS3EWCYCtxVE4fC_J2eTcXiChhuaL4Vcbo94B5geIL4,15680 +bs4/tests/test_docs.py,sha256=xoAxnUfoQ7aRqGImwW_9BJDU8WNMZHIuvWqVepvWXt8,1127 +bs4/tests/test_element.py,sha256=92oRSRoGk8gIXAbAGHErKzocx2MK32TqcQdUJ-dGQMo,2377 +bs4/tests/test_formatter.py,sha256=0qV9H7mMDBcnFFH-dwNCrSm2zNi_40WMB2GMcV35PoY,4128 +bs4/tests/test_html5lib.py,sha256=X6r13jfJ-OmG6SL_hyfFNXWs7sEEq_1TmCzCJclxvbA,8246 +bs4/tests/test_htmlparser.py,sha256=BFCspIdhkr8Bss-kHufeNcwa_lvJpVWKgJskPoZgZ7E,5532 
+bs4/tests/test_lxml.py,sha256=deaf1YOrR8I0T5yZAV4TDxcAXHzVhdlnsSajGpBoxs0,7376
+bs4/tests/test_navigablestring.py,sha256=RGSgziNf7cZnYdEPsoqL1B2I68TUJp1JmEQVxbh_ryA,5081
+bs4/tests/test_pageelement.py,sha256=fpOU3W5IAz92b0A2VxWKkI5pApObMB17cNzXNF85FfA,27792
+bs4/tests/test_soup.py,sha256=CUnK-rDccIlKMLBP4AweCqRDbPt3Lqzln_BpnBgKm4M,17810
+bs4/tests/test_tag.py,sha256=f19uie7QehvgvhIqNWfjDRR4TKa-ftm_RRoo6LXZyqk,9016
+bs4/tests/test_tree.py,sha256=y9Qvs8nnYj6RnGRSxtoYQQwSja-DlbtukCVs0neVwyU,47557
diff --git a/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/WHEEL b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/WHEEL
new file mode 100644
index 0000000..bff023e
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.31.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/top_level.txt b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/top_level.txt
new file mode 100644
index 0000000..34b5955
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/beautifulsoup4-4.11.1.dist-info/top_level.txt
@@ -0,0 +1,3 @@
+bs4
+bs4/builder
+bs4/tests
diff --git a/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/PKG-INFO b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/PKG-INFO
new file mode 100644
index 0000000..805d1d3
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/PKG-INFO
@@ -0,0 +1,23 @@
+Metadata-Version: 2.1
+Name: bs4
+Version: 0.0.1
+Summary: Screen-scraping library
+Home-page: https://pypi.python.org/pypi/beautifulsoup4
+Author: Leonard Richardson
+Author-email: leonardr@segfault.org
+License: MIT
+Download-URL: http://www.crummy.com/software/BeautifulSoup/bs4/download/
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Classifier: Topic :: Text Processing :: Markup :: XML
+Classifier: Topic :: Text Processing :: Markup :: SGML
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+
+Use `beautifulsoup4 <https://pypi.python.org/pypi/beautifulsoup4>`_ instead.
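A note on the `bs4-0.0.1` metadata above: the PyPI project `bs4` is only a placeholder whose sole requirement (see its `requires.txt` below) is the real `beautifulsoup4` distribution, so `import bs4` resolves to the same package either way. A quick sanity check against the wheel vendored in this tree:

```python
import bs4

# The placeholder "bs4" project pulls in beautifulsoup4; both expose
# the same importable module.
print(bs4.__version__)  # -> '4.11.1' for the copy vendored here
```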
+ diff --git a/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/SOURCES.txt b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/SOURCES.txt new file mode 100644 index 0000000..b46d1c5 --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/SOURCES.txt @@ -0,0 +1,7 @@ +setup.cfg +setup.py +bs4.egg-info/PKG-INFO +bs4.egg-info/SOURCES.txt +bs4.egg-info/dependency_links.txt +bs4.egg-info/requires.txt +bs4.egg-info/top_level.txt \ No newline at end of file diff --git a/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/dependency_links.txt b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/installed-files.txt b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/installed-files.txt new file mode 100644 index 0000000..62cfadb --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/installed-files.txt @@ -0,0 +1,5 @@ +PKG-INFO +SOURCES.txt +dependency_links.txt +requires.txt +top_level.txt diff --git a/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/requires.txt b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/requires.txt new file mode 100644 index 0000000..c1f5f71 --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/requires.txt @@ -0,0 +1 @@ +beautifulsoup4 diff --git a/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/top_level.txt b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/top_level.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4-0.0.1-py3.10.egg-info/top_level.txt @@ -0,0 +1 @@ + diff --git a/sbsheriff/Lib/site-packages/bs4/__init__.py b/sbsheriff/Lib/site-packages/bs4/__init__.py new file mode 100644 index 0000000..b3c9feb --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4/__init__.py @@ -0,0 +1,812 @@ +"""Beautiful Soup Elixir and Tonic - "The Screen-Scraper's Friend". + +http://www.crummy.com/software/BeautifulSoup/ + +Beautiful Soup uses a pluggable XML or HTML parser to parse a +(possibly invalid) document into a tree representation. Beautiful Soup +provides methods and Pythonic idioms that make it easy to navigate, +search, and modify the parse tree. + +Beautiful Soup works with Python 3.5 and up. It works better if lxml +and/or html5lib is installed. + +For more than you ever wanted to know about Beautiful Soup, see the +documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ +""" + +__author__ = "Leonard Richardson (leonardr@segfault.org)" +__version__ = "4.11.1" +__copyright__ = "Copyright (c) 2004-2022 Leonard Richardson" +# Use of this source code is governed by the MIT license. +__license__ = "MIT" + +__all__ = ['BeautifulSoup'] + +from collections import Counter +import os +import re +import sys +import traceback +import warnings + +# The very first thing we do is give a useful error if someone is +# running this code under Python 2. +if sys.version_info.major < 3: + raise ImportError('You are trying to use a Python 3-specific version of Beautiful Soup under Python 2. This will not work. 
The final version of Beautiful Soup to support Python 2 was 4.9.3.')
+
+from .builder import (
+    builder_registry,
+    ParserRejectedMarkup,
+    XMLParsedAsHTMLWarning,
+)
+from .dammit import UnicodeDammit
+from .element import (
+    CData,
+    Comment,
+    DEFAULT_OUTPUT_ENCODING,
+    Declaration,
+    Doctype,
+    NavigableString,
+    PageElement,
+    ProcessingInstruction,
+    PYTHON_SPECIFIC_ENCODINGS,
+    ResultSet,
+    Script,
+    Stylesheet,
+    SoupStrainer,
+    Tag,
+    TemplateString,
+    )
+
+# Define some custom warnings.
+class GuessedAtParserWarning(UserWarning):
+    """The warning issued when BeautifulSoup has to guess what parser to
+    use -- probably because no parser was specified in the constructor.
+    """
+
+class MarkupResemblesLocatorWarning(UserWarning):
+    """The warning issued when BeautifulSoup is given 'markup' that
+    actually looks like a resource locator -- a URL or a path to a file
+    on disk.
+    """
+
+
+class BeautifulSoup(Tag):
+    """A data structure representing a parsed HTML or XML document.
+
+    Most of the methods you'll call on a BeautifulSoup object are inherited from
+    PageElement or Tag.
+
+    Internally, this class defines the basic interface called by the
+    tree builders when converting an HTML/XML document into a data
+    structure. The interface abstracts away the differences between
+    parsers. To write a new tree builder, you'll need to understand
+    these methods as a whole.
+
+    These methods will be called by the BeautifulSoup constructor:
+    * reset()
+    * feed(markup)
+
+    The tree builder may call these methods from its feed() implementation:
+    * handle_starttag(name, attrs) # See note about return value
+    * handle_endtag(name)
+    * handle_data(data) # Appends to the current data node
+    * endData(containerClass) # Ends the current data node
+
+    No matter how complicated the underlying parser is, you should be
+    able to build a tree using 'start tag' events, 'end tag' events,
+    'data' events, and "done with data" events.
+
+    If you encounter an empty-element tag (aka a self-closing tag,
+    like HTML's <br>
tag), call handle_starttag and then + handle_endtag. + """ + + # Since BeautifulSoup subclasses Tag, it's possible to treat it as + # a Tag with a .name. This name makes it clear the BeautifulSoup + # object isn't a real markup tag. + ROOT_TAG_NAME = '[document]' + + # If the end-user gives no indication which tree builder they + # want, look for one with these features. + DEFAULT_BUILDER_FEATURES = ['html', 'fast'] + + # A string containing all ASCII whitespace characters, used in + # endData() to detect data chunks that seem 'empty'. + ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' + + NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, pass the additional argument 'features=\"%(parser)s\"' to the BeautifulSoup constructor.\n" + + def __init__(self, markup="", features=None, builder=None, + parse_only=None, from_encoding=None, exclude_encodings=None, + element_classes=None, **kwargs): + """Constructor. + + :param markup: A string or a file-like object representing + markup to be parsed. + + :param features: Desirable features of the parser to be + used. This may be the name of a specific parser ("lxml", + "lxml-xml", "html.parser", or "html5lib") or it may be the + type of markup to be used ("html", "html5", "xml"). It's + recommended that you name a specific parser, so that + Beautiful Soup gives you the same results across platforms + and virtual environments. + + :param builder: A TreeBuilder subclass to instantiate (or + instance to use) instead of looking one up based on + `features`. You only need to use this if you've implemented a + custom TreeBuilder. + + :param parse_only: A SoupStrainer. Only parts of the document + matching the SoupStrainer will be considered. This is useful + when parsing part of a document that would otherwise be too + large to fit into memory. + + :param from_encoding: A string indicating the encoding of the + document to be parsed. Pass this in if Beautiful Soup is + guessing wrongly about the document's encoding. + + :param exclude_encodings: A list of strings indicating + encodings known to be wrong. Pass this in if you don't know + the document's encoding but you know Beautiful Soup's guess is + wrong. + + :param element_classes: A dictionary mapping BeautifulSoup + classes like Tag and NavigableString, to other classes you'd + like to be instantiated instead as the parse tree is + built. This is useful for subclassing Tag or NavigableString + to modify default behavior. + + :param kwargs: For backwards compatibility purposes, the + constructor accepts certain keyword arguments used in + Beautiful Soup 3. None of these arguments do anything in + Beautiful Soup 4; they will result in a warning and then be + ignored. + + Apart from this, any keyword arguments passed into the + BeautifulSoup constructor are propagated to the TreeBuilder + constructor. This makes it possible to configure a + TreeBuilder by passing in arguments, not just by saying which + one to use. + """ + if 'convertEntities' in kwargs: + del kwargs['convertEntities'] + warnings.warn( + "BS4 does not respect the convertEntities argument to the " + "BeautifulSoup constructor. 
Entities are always converted " + "to Unicode characters.") + + if 'markupMassage' in kwargs: + del kwargs['markupMassage'] + warnings.warn( + "BS4 does not respect the markupMassage argument to the " + "BeautifulSoup constructor. The tree builder is responsible " + "for any necessary markup massage.") + + if 'smartQuotesTo' in kwargs: + del kwargs['smartQuotesTo'] + warnings.warn( + "BS4 does not respect the smartQuotesTo argument to the " + "BeautifulSoup constructor. Smart quotes are always converted " + "to Unicode characters.") + + if 'selfClosingTags' in kwargs: + del kwargs['selfClosingTags'] + warnings.warn( + "BS4 does not respect the selfClosingTags argument to the " + "BeautifulSoup constructor. The tree builder is responsible " + "for understanding self-closing tags.") + + if 'isHTML' in kwargs: + del kwargs['isHTML'] + warnings.warn( + "BS4 does not respect the isHTML argument to the " + "BeautifulSoup constructor. Suggest you use " + "features='lxml' for HTML and features='lxml-xml' for " + "XML.") + + def deprecated_argument(old_name, new_name): + if old_name in kwargs: + warnings.warn( + 'The "%s" argument to the BeautifulSoup constructor ' + 'has been renamed to "%s."' % (old_name, new_name), + DeprecationWarning + ) + return kwargs.pop(old_name) + return None + + parse_only = parse_only or deprecated_argument( + "parseOnlyThese", "parse_only") + + from_encoding = from_encoding or deprecated_argument( + "fromEncoding", "from_encoding") + + if from_encoding and isinstance(markup, str): + warnings.warn("You provided Unicode markup but also provided a value for from_encoding. Your from_encoding will be ignored.") + from_encoding = None + + self.element_classes = element_classes or dict() + + # We need this information to track whether or not the builder + # was specified well enough that we can omit the 'you need to + # specify a parser' warning. + original_builder = builder + original_features = features + + if isinstance(builder, type): + # A builder class was passed in; it needs to be instantiated. + builder_class = builder + builder = None + elif builder is None: + if isinstance(features, str): + features = [features] + if features is None or len(features) == 0: + features = self.DEFAULT_BUILDER_FEATURES + builder_class = builder_registry.lookup(*features) + if builder_class is None: + raise FeatureNotFound( + "Couldn't find a tree builder with the features you " + "requested: %s. Do you need to install a parser library?" + % ",".join(features)) + + # At this point either we have a TreeBuilder instance in + # builder, or we have a builder_class that we can instantiate + # with the remaining **kwargs. + if builder is None: + builder = builder_class(**kwargs) + if not original_builder and not ( + original_features == builder.NAME or + original_features in builder.ALTERNATE_NAMES + ) and markup: + # The user did not tell us which TreeBuilder to use, + # and we had to guess. Issue a warning. + if builder.is_xml: + markup_type = "XML" + else: + markup_type = "HTML" + + # This code adapted from warnings.py so that we get the same line + # of code as our warnings.warn() call gets, even if the answer is wrong + # (as it may be in a multithreading situation). 
+ caller = None + try: + caller = sys._getframe(1) + except ValueError: + pass + if caller: + globals = caller.f_globals + line_number = caller.f_lineno + else: + globals = sys.__dict__ + line_number= 1 + filename = globals.get('__file__') + if filename: + fnl = filename.lower() + if fnl.endswith((".pyc", ".pyo")): + filename = filename[:-1] + if filename: + # If there is no filename at all, the user is most likely in a REPL, + # and the warning is not necessary. + values = dict( + filename=filename, + line_number=line_number, + parser=builder.NAME, + markup_type=markup_type + ) + warnings.warn( + self.NO_PARSER_SPECIFIED_WARNING % values, + GuessedAtParserWarning, stacklevel=2 + ) + else: + if kwargs: + warnings.warn("Keyword arguments to the BeautifulSoup constructor will be ignored. These would normally be passed into the TreeBuilder constructor, but a TreeBuilder instance was passed in as `builder`.") + + self.builder = builder + self.is_xml = builder.is_xml + self.known_xml = self.is_xml + self._namespaces = dict() + self.parse_only = parse_only + + if hasattr(markup, 'read'): # It's a file-type object. + markup = markup.read() + elif len(markup) <= 256 and ( + (isinstance(markup, bytes) and not b'<' in markup) + or (isinstance(markup, str) and not '<' in markup) + ): + # Issue warnings for a couple beginner problems + # involving passing non-markup to Beautiful Soup. + # Beautiful Soup will still parse the input as markup, + # since that is sometimes the intended behavior. + if not self._markup_is_url(markup): + self._markup_resembles_filename(markup) + + rejections = [] + success = False + for (self.markup, self.original_encoding, self.declared_html_encoding, + self.contains_replacement_characters) in ( + self.builder.prepare_markup( + markup, from_encoding, exclude_encodings=exclude_encodings)): + self.reset() + self.builder.initialize_soup(self) + try: + self._feed() + success = True + break + except ParserRejectedMarkup as e: + rejections.append(e) + pass + + if not success: + other_exceptions = [str(e) for e in rejections] + raise ParserRejectedMarkup( + "The markup you provided was rejected by the parser. Trying a different parser or a different encoding may help.\n\nOriginal exception(s) from parser:\n " + "\n ".join(other_exceptions) + ) + + # Clear out the markup and remove the builder's circular + # reference to this object. + self.markup = None + self.builder.soup = None + + def __copy__(self): + """Copy a BeautifulSoup object by converting the document to a string and parsing it again.""" + copy = type(self)( + self.encode('utf-8'), builder=self.builder, from_encoding='utf-8' + ) + + # Although we encoded the tree to UTF-8, that may not have + # been the encoding of the original markup. Set the copy's + # .original_encoding to reflect the original object's + # .original_encoding. + copy.original_encoding = self.original_encoding + return copy + + def __getstate__(self): + # Frequently a tree builder can't be pickled. + d = dict(self.__dict__) + if 'builder' in d and d['builder'] is not None and not self.builder.picklable: + d['builder'] = None + return d + + @classmethod + def _decode_markup(cls, markup): + """Ensure `markup` is bytes so it's safe to send into warnings.warn. + + TODO: warnings.warn had this problem back in 2010 but it might not + anymore. 
+ """ + if isinstance(markup, bytes): + decoded = markup.decode('utf-8', 'replace') + else: + decoded = markup + return decoded + + @classmethod + def _markup_is_url(cls, markup): + """Error-handling method to raise a warning if incoming markup looks + like a URL. + + :param markup: A string. + :return: Whether or not the markup resembles a URL + closely enough to justify a warning. + """ + if isinstance(markup, bytes): + space = b' ' + cant_start_with = (b"http:", b"https:") + elif isinstance(markup, str): + space = ' ' + cant_start_with = ("http:", "https:") + else: + return False + + if any(markup.startswith(prefix) for prefix in cant_start_with): + if not space in markup: + warnings.warn( + 'The input looks more like a URL than markup. You may want to use' + ' an HTTP client like requests to get the document behind' + ' the URL, and feed that document to Beautiful Soup.', + MarkupResemblesLocatorWarning + ) + return True + return False + + @classmethod + def _markup_resembles_filename(cls, markup): + """Error-handling method to raise a warning if incoming markup + resembles a filename. + + :param markup: A bytestring or string. + :return: Whether or not the markup resembles a filename + closely enough to justify a warning. + """ + path_characters = '/\\' + extensions = ['.html', '.htm', '.xml', '.xhtml', '.txt'] + if isinstance(markup, bytes): + path_characters = path_characters.encode("utf8") + extensions = [x.encode('utf8') for x in extensions] + filelike = False + if any(x in markup for x in path_characters): + filelike = True + else: + lower = markup.lower() + if any(lower.endswith(ext) for ext in extensions): + filelike = True + if filelike: + warnings.warn( + 'The input looks more like a filename than markup. You may' + ' want to open this file and pass the filehandle into' + ' Beautiful Soup.', + MarkupResemblesLocatorWarning + ) + return True + return False + + def _feed(self): + """Internal method that parses previously set markup, creating a large + number of Tag and NavigableString objects. + """ + # Convert the document to Unicode. + self.builder.reset() + + self.builder.feed(self.markup) + # Close out any unfinished strings and close all the open tags. + self.endData() + while self.currentTag.name != self.ROOT_TAG_NAME: + self.popTag() + + def reset(self): + """Reset this object to a state as though it had never parsed any + markup. + """ + Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) + self.hidden = 1 + self.builder.reset() + self.current_data = [] + self.currentTag = None + self.tagStack = [] + self.open_tag_counter = Counter() + self.preserve_whitespace_tag_stack = [] + self.string_container_stack = [] + self.pushTag(self) + + def new_tag(self, name, namespace=None, nsprefix=None, attrs={}, + sourceline=None, sourcepos=None, **kwattrs): + """Create a new Tag associated with this BeautifulSoup object. + + :param name: The name of the new Tag. + :param namespace: The URI of the new Tag's XML namespace, if any. + :param prefix: The prefix for the new Tag's XML namespace, if any. + :param attrs: A dictionary of this Tag's attribute values; can + be used instead of `kwattrs` for attributes like 'class' + that are reserved words in Python. + :param sourceline: The line number where this tag was + (purportedly) found in its source document. + :param sourcepos: The character position within `sourceline` where this + tag was (purportedly) found. + :param kwattrs: Keyword arguments for the new Tag's attribute values. 
+ + """ + kwattrs.update(attrs) + return self.element_classes.get(Tag, Tag)( + None, self.builder, name, namespace, nsprefix, kwattrs, + sourceline=sourceline, sourcepos=sourcepos + ) + + def string_container(self, base_class=None): + container = base_class or NavigableString + + # There may be a general override of NavigableString. + container = self.element_classes.get( + container, container + ) + + # On top of that, we may be inside a tag that needs a special + # container class. + if self.string_container_stack and container is NavigableString: + container = self.builder.string_containers.get( + self.string_container_stack[-1].name, container + ) + return container + + def new_string(self, s, subclass=None): + """Create a new NavigableString associated with this BeautifulSoup + object. + """ + container = self.string_container(subclass) + return container(s) + + def insert_before(self, *args): + """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement + it because there is nothing before or after it in the parse tree. + """ + raise NotImplementedError("BeautifulSoup objects don't support insert_before().") + + def insert_after(self, *args): + """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement + it because there is nothing before or after it in the parse tree. + """ + raise NotImplementedError("BeautifulSoup objects don't support insert_after().") + + def popTag(self): + """Internal method called by _popToTag when a tag is closed.""" + tag = self.tagStack.pop() + if tag.name in self.open_tag_counter: + self.open_tag_counter[tag.name] -= 1 + if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: + self.preserve_whitespace_tag_stack.pop() + if self.string_container_stack and tag == self.string_container_stack[-1]: + self.string_container_stack.pop() + #print("Pop", tag.name) + if self.tagStack: + self.currentTag = self.tagStack[-1] + return self.currentTag + + def pushTag(self, tag): + """Internal method called by handle_starttag when a tag is opened.""" + #print("Push", tag.name) + if self.currentTag is not None: + self.currentTag.contents.append(tag) + self.tagStack.append(tag) + self.currentTag = self.tagStack[-1] + if tag.name != self.ROOT_TAG_NAME: + self.open_tag_counter[tag.name] += 1 + if tag.name in self.builder.preserve_whitespace_tags: + self.preserve_whitespace_tag_stack.append(tag) + if tag.name in self.builder.string_containers: + self.string_container_stack.append(tag) + + def endData(self, containerClass=None): + """Method called by the TreeBuilder when the end of a data segment + occurs. + """ + if self.current_data: + current_data = ''.join(self.current_data) + # If whitespace is not preserved, and this string contains + # nothing but ASCII spaces, replace it with a single space + # or newline. + if not self.preserve_whitespace_tag_stack: + strippable = True + for i in current_data: + if i not in self.ASCII_SPACES: + strippable = False + break + if strippable: + if '\n' in current_data: + current_data = '\n' + else: + current_data = ' ' + + # Reset the data collector. + self.current_data = [] + + # Should we add this string to the tree at all? 
+ if self.parse_only and len(self.tagStack) <= 1 and \ + (not self.parse_only.text or \ + not self.parse_only.search(current_data)): + return + + containerClass = self.string_container(containerClass) + o = containerClass(current_data) + self.object_was_parsed(o) + + def object_was_parsed(self, o, parent=None, most_recent_element=None): + """Method called by the TreeBuilder to integrate an object into the parse tree.""" + if parent is None: + parent = self.currentTag + if most_recent_element is not None: + previous_element = most_recent_element + else: + previous_element = self._most_recent_element + + next_element = previous_sibling = next_sibling = None + if isinstance(o, Tag): + next_element = o.next_element + next_sibling = o.next_sibling + previous_sibling = o.previous_sibling + if previous_element is None: + previous_element = o.previous_element + + fix = parent.next_element is not None + + o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) + + self._most_recent_element = o + parent.contents.append(o) + + # Check if we are inserting into an already parsed node. + if fix: + self._linkage_fixer(parent) + + def _linkage_fixer(self, el): + """Make sure linkage of this fragment is sound.""" + + first = el.contents[0] + child = el.contents[-1] + descendant = child + + if child is first and el.parent is not None: + # Parent should be linked to first child + el.next_element = child + # We are no longer linked to whatever this element is + prev_el = child.previous_element + if prev_el is not None and prev_el is not el: + prev_el.next_element = None + # First child should be linked to the parent, and no previous siblings. + child.previous_element = el + child.previous_sibling = None + + # We have no sibling as we've been appended as the last. + child.next_sibling = None + + # This index is a tag, dig deeper for a "last descendant" + if isinstance(child, Tag) and child.contents: + descendant = child._last_descendant(False) + + # As the final step, link last descendant. It should be linked + # to the parent's next sibling (if found), else walk up the chain + # and find a parent with a sibling. It should have no next sibling. + descendant.next_element = None + descendant.next_sibling = None + target = el + while True: + if target is None: + break + elif target.next_sibling is not None: + descendant.next_element = target.next_sibling + target.next_sibling.previous_element = child + break + target = target.parent + + def _popToTag(self, name, nsprefix=None, inclusivePop=True): + """Pops the tag stack up to and including the most recent + instance of the given tag. + + If there are no open tags with the given name, nothing will be + popped. + + :param name: Pop up to the most recent tag with this name. + :param nsprefix: The namespace prefix that goes with `name`. + :param inclusivePop: It this is false, pops the tag stack up + to but *not* including the most recent instqance of the + given tag. + + """ + #print("Popping to %s" % name) + if name == self.ROOT_TAG_NAME: + # The BeautifulSoup object itself can never be popped. 
return
+
+        most_recently_popped = None
+
+        stack_size = len(self.tagStack)
+        for i in range(stack_size - 1, 0, -1):
+            if not self.open_tag_counter.get(name):
+                break
+            t = self.tagStack[i]
+            if (name == t.name and nsprefix == t.prefix):
+                if inclusivePop:
+                    most_recently_popped = self.popTag()
+                break
+            most_recently_popped = self.popTag()
+
+        return most_recently_popped
+
+    def handle_starttag(self, name, namespace, nsprefix, attrs, sourceline=None,
+                        sourcepos=None, namespaces=None):
+        """Called by the tree builder when a new tag is encountered.
+
+        :param name: Name of the tag.
+        :param nsprefix: Namespace prefix for the tag.
+        :param attrs: A dictionary of attribute values.
+        :param sourceline: The line number where this tag was found in its
+         source document.
+        :param sourcepos: The character position within `sourceline` where this
+         tag was found.
+        :param namespaces: A dictionary of all namespace prefix mappings
+         currently in scope in the document.
+
+        If this method returns None, the tag was rejected by an active
+        SoupStrainer. You should proceed as if the tag had not occurred
+        in the document. For instance, if this was a self-closing tag,
+        don't call handle_endtag.
+        """
+        # print("Start tag %s: %s" % (name, attrs))
+        self.endData()
+
+        if (self.parse_only and len(self.tagStack) <= 1
+            and (self.parse_only.text
+                 or not self.parse_only.search_tag(name, attrs))):
+            return None
+
+        tag = self.element_classes.get(Tag, Tag)(
+            self, self.builder, name, namespace, nsprefix, attrs,
+            self.currentTag, self._most_recent_element,
+            sourceline=sourceline, sourcepos=sourcepos,
+            namespaces=namespaces
+        )
+        if tag is None:
+            return tag
+        if self._most_recent_element is not None:
+            self._most_recent_element.next_element = tag
+        self._most_recent_element = tag
+        self.pushTag(tag)
+        return tag
+
+    def handle_endtag(self, name, nsprefix=None):
+        """Called by the tree builder when an ending tag is encountered.
+
+        :param name: Name of the tag.
+        :param nsprefix: Namespace prefix for the tag.
+        """
+        #print("End tag: " + name)
+        self.endData()
+        self._popToTag(name, nsprefix)
+
+    def handle_data(self, data):
+        """Called by the tree builder when a chunk of textual data is encountered."""
+        self.current_data.append(data)
+
+    def decode(self, pretty_print=False,
+               eventual_encoding=DEFAULT_OUTPUT_ENCODING,
+               formatter="minimal"):
+        """Returns a string or Unicode representation of the parse tree
+        as an HTML or XML document.
+
+        :param pretty_print: If this is True, indentation will be used to
+            make the document more readable.
+        :param eventual_encoding: The encoding of the final document.
+            If this is None, the document will be a Unicode string.
+        """
+        if self.is_xml:
+            # Print the XML declaration
+            encoding_part = ''
+            if eventual_encoding in PYTHON_SPECIFIC_ENCODINGS:
+                # This is a special Python encoding; it can't actually
+                # go into an XML document because it means nothing
+                # outside of Python.
+                eventual_encoding = None
+            if eventual_encoding != None:
+                encoding_part = ' encoding="%s"' % eventual_encoding
+            prefix = '<?xml version="1.0"%s?>\n' % encoding_part
+        else:
+            prefix = ''
+        if not pretty_print:
+            indent_level = None
+        else:
+            indent_level = 0
+        return prefix + super(BeautifulSoup, self).decode(
+            indent_level, eventual_encoding, formatter)
+
+# Aliases to make it easier to get started quickly, e.g.
'from bs4 import _soup' +_s = BeautifulSoup +_soup = BeautifulSoup + +class BeautifulStoneSoup(BeautifulSoup): + """Deprecated interface to an XML parser.""" + + def __init__(self, *args, **kwargs): + kwargs['features'] = 'xml' + warnings.warn( + 'The BeautifulStoneSoup class is deprecated. Instead of using ' + 'it, pass features="xml" into the BeautifulSoup constructor.', + DeprecationWarning + ) + super(BeautifulStoneSoup, self).__init__(*args, **kwargs) + + +class StopParsing(Exception): + """Exception raised by a TreeBuilder if it's unable to continue parsing.""" + pass + +class FeatureNotFound(ValueError): + """Exception raised by the BeautifulSoup constructor if no parser with the + requested features is found. + """ + pass + + +#If this file is run as a script, act as an HTML pretty-printer. +if __name__ == '__main__': + import sys + soup = BeautifulSoup(sys.stdin) + print((soup.prettify())) diff --git a/sbsheriff/Lib/site-packages/bs4/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..ea95190 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/__pycache__/dammit.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/__pycache__/dammit.cpython-310.pyc new file mode 100644 index 0000000..45d00f8 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/__pycache__/dammit.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/__pycache__/diagnose.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/__pycache__/diagnose.cpython-310.pyc new file mode 100644 index 0000000..7e08c38 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/__pycache__/diagnose.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/__pycache__/element.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/__pycache__/element.cpython-310.pyc new file mode 100644 index 0000000..bcd7b27 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/__pycache__/element.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/__pycache__/formatter.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/__pycache__/formatter.cpython-310.pyc new file mode 100644 index 0000000..c3e2e5f Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/__pycache__/formatter.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/builder/__init__.py b/sbsheriff/Lib/site-packages/bs4/builder/__init__.py new file mode 100644 index 0000000..9f789f3 --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4/builder/__init__.py @@ -0,0 +1,631 @@ +# Use of this source code is governed by the MIT license. +__license__ = "MIT" + +from collections import defaultdict +import itertools +import re +import warnings +import sys +from bs4.element import ( + CharsetMetaAttributeValue, + ContentMetaAttributeValue, + RubyParenthesisString, + RubyTextString, + Stylesheet, + Script, + TemplateString, + nonwhitespace_re +) + +__all__ = [ + 'HTMLTreeBuilder', + 'SAXTreeBuilder', + 'TreeBuilder', + 'TreeBuilderRegistry', + ] + +# Some useful features for a TreeBuilder to have. +FAST = 'fast' +PERMISSIVE = 'permissive' +STRICT = 'strict' +XML = 'xml' +HTML = 'html' +HTML_5 = 'html5' + +class XMLParsedAsHTMLWarning(UserWarning): + """The warning issued when an HTML parser is used to parse + XML that is not XHTML. + """ + MESSAGE = """It looks like you're parsing an XML document using an HTML parser. 
If this really is an HTML document (maybe it's XHTML?), you can ignore or filter this warning. If it's XML, you should know that using an XML parser will be more reliable. To parse this document as XML, make sure you have the lxml package installed, and pass the keyword argument `features="xml"` into the BeautifulSoup constructor.""" + + +class TreeBuilderRegistry(object): + """A way of looking up TreeBuilder subclasses by their name or by desired + features. + """ + + def __init__(self): + self.builders_for_feature = defaultdict(list) + self.builders = [] + + def register(self, treebuilder_class): + """Register a treebuilder based on its advertised features. + + :param treebuilder_class: A subclass of Treebuilder. its .features + attribute should list its features. + """ + for feature in treebuilder_class.features: + self.builders_for_feature[feature].insert(0, treebuilder_class) + self.builders.insert(0, treebuilder_class) + + def lookup(self, *features): + """Look up a TreeBuilder subclass with the desired features. + + :param features: A list of features to look for. If none are + provided, the most recently registered TreeBuilder subclass + will be used. + :return: A TreeBuilder subclass, or None if there's no + registered subclass with all the requested features. + """ + if len(self.builders) == 0: + # There are no builders at all. + return None + + if len(features) == 0: + # They didn't ask for any features. Give them the most + # recently registered builder. + return self.builders[0] + + # Go down the list of features in order, and eliminate any builders + # that don't match every feature. + features = list(features) + features.reverse() + candidates = None + candidate_set = None + while len(features) > 0: + feature = features.pop() + we_have_the_feature = self.builders_for_feature.get(feature, []) + if len(we_have_the_feature) > 0: + if candidates is None: + candidates = we_have_the_feature + candidate_set = set(candidates) + else: + # Eliminate any candidates that don't have this feature. + candidate_set = candidate_set.intersection( + set(we_have_the_feature)) + + # The only valid candidates are the ones in candidate_set. + # Go through the original list of candidates and pick the first one + # that's in candidate_set. + if candidate_set is None: + return None + for candidate in candidates: + if candidate in candidate_set: + return candidate + return None + +# The BeautifulSoup class will take feature lists from developers and use them +# to look up builders in this registry. +builder_registry = TreeBuilderRegistry() + +class TreeBuilder(object): + """Turn a textual document into a Beautiful Soup object tree.""" + + NAME = "[Unknown tree builder]" + ALTERNATE_NAMES = [] + features = [] + + is_xml = False + picklable = False + empty_element_tags = None # A tag will be considered an empty-element + # tag when and only when it has no contents. + + # A value for these tag/attribute combinations is a space- or + # comma-separated list of CDATA, rather than a single CDATA. + DEFAULT_CDATA_LIST_ATTRIBUTES = {} + + # Whitespace should be preserved inside these tags. + DEFAULT_PRESERVE_WHITESPACE_TAGS = set() + + # The textual contents of tags with these names should be + # instantiated with some class other than NavigableString. + DEFAULT_STRING_CONTAINERS = {} + + USE_DEFAULT = object() + + # Most parsers don't keep track of line numbers. 
+ TRACKS_LINE_NUMBERS = False + + def __init__(self, multi_valued_attributes=USE_DEFAULT, + preserve_whitespace_tags=USE_DEFAULT, + store_line_numbers=USE_DEFAULT, + string_containers=USE_DEFAULT, + ): + """Constructor. + + :param multi_valued_attributes: If this is set to None, the + TreeBuilder will not turn any values for attributes like + 'class' into lists. Setting this to a dictionary will + customize this behavior; look at DEFAULT_CDATA_LIST_ATTRIBUTES + for an example. + + Internally, these are called "CDATA list attributes", but that + probably doesn't make sense to an end-user, so the argument name + is `multi_valued_attributes`. + + :param preserve_whitespace_tags: A list of tags to treat + the way
<pre> tags are treated in HTML. Tags in this list
+         are immune from pretty-printing; their contents will always be
+         output as-is.
+
+        :param string_containers: A dictionary mapping tag names to
+        the classes that should be instantiated to contain the textual
+        contents of those tags. The default is to use NavigableString
+        for every tag, no matter what the name. You can override the
+        default by changing DEFAULT_STRING_CONTAINERS.
+
+        :param store_line_numbers: If the parser keeps track of the
+         line numbers and positions of the original markup, that
+         information will, by default, be stored in each corresponding
+         `Tag` object. You can turn this off by passing
+         store_line_numbers=False. If the parser you're using doesn't 
+         keep track of this information, then setting store_line_numbers=True
+         will do nothing.
+        """
+        self.soup = None
+        if multi_valued_attributes is self.USE_DEFAULT:
+            multi_valued_attributes = self.DEFAULT_CDATA_LIST_ATTRIBUTES
+        self.cdata_list_attributes = multi_valued_attributes
+        if preserve_whitespace_tags is self.USE_DEFAULT:
+            preserve_whitespace_tags = self.DEFAULT_PRESERVE_WHITESPACE_TAGS
+        self.preserve_whitespace_tags = preserve_whitespace_tags
+        if store_line_numbers == self.USE_DEFAULT:
+            store_line_numbers = self.TRACKS_LINE_NUMBERS
+        self.store_line_numbers = store_line_numbers 
+        if string_containers == self.USE_DEFAULT:
+            string_containers = self.DEFAULT_STRING_CONTAINERS
+        self.string_containers = string_containers
+        
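The `multi_valued_attributes` plumbing above is easiest to observe from the public `BeautifulSoup` constructor, which forwards extra keyword arguments to the tree builder. A minimal sketch, assuming the stock `html.parser` builder:

```python
from bs4 import BeautifulSoup

markup = '<p class="cls1 cls2">text</p>'

# Default: "class" is a CDATA-list attribute, so its value becomes a list.
soup = BeautifulSoup(markup, "html.parser")
print(soup.p["class"])  # -> ['cls1', 'cls2']

# multi_valued_attributes=None turns the whitespace-splitting off.
soup = BeautifulSoup(markup, "html.parser", multi_valued_attributes=None)
print(soup.p["class"])  # -> 'cls1 cls2'
```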
+    def initialize_soup(self, soup):
+        """The BeautifulSoup object has been initialized and is now
+        being associated with the TreeBuilder.
+
+        :param soup: A BeautifulSoup object.
+        """
+        self.soup = soup
+        
+    def reset(self):
+        """Do any work necessary to reset the underlying parser
+        for a new document.
+
+        By default, this does nothing.
+        """
+        pass
+
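To see the `string_containers` hook from the outside: with the stock HTML builders, text inside `<script>` and `<style>` tags is wrapped in dedicated `NavigableString` subclasses rather than plain strings. A sketch, assuming the default `DEFAULT_STRING_CONTAINERS` mapping shipped with the HTML tree builders:

```python
from bs4 import BeautifulSoup
from bs4.element import Script, Stylesheet

soup = BeautifulSoup(
    "<script>var x = 1;</script><style>p {}</style>", "html.parser"
)

# These strings were created via string_container(), not NavigableString.
print(isinstance(soup.script.string, Script))     # -> True
print(isinstance(soup.style.string, Stylesheet))  # -> True
```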
+    def can_be_empty_element(self, tag_name):
+        """Might a tag with this name be an empty-element tag?
+
+        The final markup may or may not actually present this tag as
+        self-closing.
+
+        For instance: an HTMLBuilder does not consider a <p> tag to be
+        an empty-element tag (it's not in
+        HTMLBuilder.empty_element_tags). This means an empty <p> tag
+        will be presented as "<p></p>", not "<p/>" or "<p>".
+
+        The default implementation has no opinion about which tags are
+        empty-element tags, so a tag will be presented as an
+        empty-element tag if and only if it has no children.
+        "<foo></foo>" will become "<foo/>", and "<foo>bar</foo>" will
+        be left alone.
+
+        :param tag_name: The name of a markup tag.
+        """
+        if self.empty_element_tags is None:
+            return True
+        return tag_name in self.empty_element_tags
+
+    def feed(self, markup):
+        """Run some incoming markup through some parsing process,
+        populating the `BeautifulSoup` object in self.soup.
+
+        This method is not implemented in TreeBuilder; it must be
+        implemented in subclasses.
+
+        :return: None.
+        """
+        raise NotImplementedError()
+
+    def prepare_markup(self, markup, user_specified_encoding=None,
+                       document_declared_encoding=None, exclude_encodings=None):
+        """Run any preliminary steps necessary to make incoming markup
+        acceptable to the parser.
+
+        :param markup: Some markup -- probably a bytestring.
+        :param user_specified_encoding: The user asked to try this encoding.
+        :param document_declared_encoding: The markup itself claims to be
+            in this encoding. NOTE: This argument is not used by the
+            calling code and can probably be removed.
+        :param exclude_encodings: The user asked _not_ to try any of
+            these encodings.
+
+        :yield: A series of 4-tuples:
+         (markup, encoding, declared encoding,
+          has undergone character replacement)
+
+         Each 4-tuple represents a strategy for converting the
+         document to Unicode and parsing it. Each strategy will be tried
+         in turn.
+
+        By default, the only strategy is to parse the markup
+        as-is. See `LXMLTreeBuilderForXML` and
+        `HTMLParserTreeBuilder` for implementations that take into
+        account the quirks of particular parsers.
+        """
+        yield markup, None, None, False
+
+    def test_fragment_to_document(self, fragment):
+        """Wrap an HTML fragment to make it look like a document.
+
+        Different parsers do this differently. For instance, lxml
+        introduces an empty <head> tag, and html5lib
+        doesn't. Abstracting this away lets us write simple tests
+        which run HTML fragments through the parser and compare the
+        results against other HTML fragments.
+
+        This method should not be used outside of tests.
+
+        :param fragment: A string -- fragment of HTML.
+        :return: A string -- a full HTML document.
+        """
+        return fragment
+
+    def set_up_substitutions(self, tag):
+        """Set up any substitutions that will need to be performed on
+        a `Tag` when it's output as a string.
+
+        By default, this does nothing. See `HTMLTreeBuilder` for a
+        case where this is used.
+
+        :param tag: A `Tag`
+        :return: Whether or not a substitution was performed.
+        """
+        return False
+
+    def _replace_cdata_list_attribute_values(self, tag_name, attrs):
+        """When an attribute value is associated with a tag that can
+        have multiple values for that attribute, convert the string
+        value to a list of strings.
+
+        Basically, replaces class="foo bar" with class=["foo", "bar"]
+
+        NOTE: This method modifies its input in place.
+
+        :param tag_name: The name of a tag.
+        :param attrs: A dictionary containing the tag's attributes.
+           Any appropriate attribute values will be modified in place.
+ """ + if not attrs: + return attrs + if self.cdata_list_attributes: + universal = self.cdata_list_attributes.get('*', []) + tag_specific = self.cdata_list_attributes.get( + tag_name.lower(), None) + for attr in list(attrs.keys()): + if attr in universal or (tag_specific and attr in tag_specific): + # We have a "class"-type attribute whose string + # value is a whitespace-separated list of + # values. Split it into a list. + value = attrs[attr] + if isinstance(value, str): + values = nonwhitespace_re.findall(value) + else: + # html5lib sometimes calls setAttributes twice + # for the same tag when rearranging the parse + # tree. On the second call the attribute value + # here is already a list. If this happens, + # leave the value alone rather than trying to + # split it again. + values = value + attrs[attr] = values + return attrs + +class SAXTreeBuilder(TreeBuilder): + """A Beautiful Soup treebuilder that listens for SAX events. + + This is not currently used for anything, but it demonstrates + how a simple TreeBuilder would work. + """ + + def feed(self, markup): + raise NotImplementedError() + + def close(self): + pass + + def startElement(self, name, attrs): + attrs = dict((key[1], value) for key, value in list(attrs.items())) + #print("Start %s, %r" % (name, attrs)) + self.soup.handle_starttag(name, attrs) + + def endElement(self, name): + #print("End %s" % name) + self.soup.handle_endtag(name) + + def startElementNS(self, nsTuple, nodeName, attrs): + # Throw away (ns, nodeName) for now. + self.startElement(nodeName, attrs) + + def endElementNS(self, nsTuple, nodeName): + # Throw away (ns, nodeName) for now. + self.endElement(nodeName) + #handler.endElementNS((ns, node.nodeName), node.nodeName) + + def startPrefixMapping(self, prefix, nodeValue): + # Ignore the prefix for now. + pass + + def endPrefixMapping(self, prefix): + # Ignore the prefix for now. + # handler.endPrefixMapping(prefix) + pass + + def characters(self, content): + self.soup.handle_data(content) + + def startDocument(self): + pass + + def endDocument(self): + pass + + +class HTMLTreeBuilder(TreeBuilder): + """This TreeBuilder knows facts about HTML. + + Such as which tags are empty-element tags. + """ + + empty_element_tags = set([ + # These are from HTML5. + 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr', + + # These are from earlier versions of HTML and are removed in HTML5. + 'basefont', 'bgsound', 'command', 'frame', 'image', 'isindex', 'nextid', 'spacer' + ]) + + # The HTML standard defines these as block-level elements. Beautiful + # Soup does not treat these elements differently from other elements, + # but it may do so eventually, and this information is available if + # you need to use it. + block_elements = set(["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header", "hr", "li", "main", "nav", "noscript", "ol", "output", "p", "pre", "section", "table", "tfoot", "ul", "video"]) + + # These HTML tags need special treatment so they can be + # represented by a string class other than NavigableString. + # + # For some of these tags, it's because the HTML standard defines + # an unusual content model for them. I made this list by going + # through the HTML spec + # (https://html.spec.whatwg.org/#metadata-content) and looking for + # "metadata content" elements that can contain strings. 
+    #
+    # The Ruby tags (<rt> and <rp>) are here despite being normal
+    # "phrasing content" tags, because the content they contain is
+    # qualitatively different from other text in the document, and it
+    # can be useful to be able to distinguish it.
+    #
+    # TODO: Arguably <option> should be here but it's not a
+    # "metadata content" element.

as a +# string. +# +# XXX This code can be removed once most Python 3 users are on 3.2.3. +if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT: + import re + attrfind_tolerant = re.compile( + r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*' + r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?') + HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant + + locatestarttagend = re.compile(r""" + <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name + (?:\s+ # whitespace before attribute name + (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name + (?:\s*=\s* # value indicator + (?:'[^']*' # LITA-enclosed value + |\"[^\"]*\" # LIT-enclosed value + |[^'\">\s]+ # bare value + ) + )? + ) + )* + \s* # trailing whitespace +""", re.VERBOSE) + BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend + + from html.parser import tagfind, attrfind + + def parse_starttag(self, i): + self.__starttag_text = None + endpos = self.check_for_whole_start_tag(i) + if endpos < 0: + return endpos + rawdata = self.rawdata + self.__starttag_text = rawdata[i:endpos] + + # Now parse the data between i+1 and j into a tag and attrs + attrs = [] + match = tagfind.match(rawdata, i+1) + assert match, 'unexpected call to parse_starttag()' + k = match.end() + self.lasttag = tag = rawdata[i+1:k].lower() + while k < endpos: + if self.strict: + m = attrfind.match(rawdata, k) + else: + m = attrfind_tolerant.match(rawdata, k) + if not m: + break + attrname, rest, attrvalue = m.group(1, 2, 3) + if not rest: + attrvalue = None + elif attrvalue[:1] == '\'' == attrvalue[-1:] or \ + attrvalue[:1] == '"' == attrvalue[-1:]: + attrvalue = attrvalue[1:-1] + if attrvalue: + attrvalue = self.unescape(attrvalue) + attrs.append((attrname.lower(), attrvalue)) + k = m.end() + + end = rawdata[k:endpos].strip() + if end not in (">", "/>"): + lineno, offset = self.getpos() + if "\n" in self.__starttag_text: + lineno = lineno + self.__starttag_text.count("\n") + offset = len(self.__starttag_text) \ + - self.__starttag_text.rfind("\n") + else: + offset = offset + len(self.__starttag_text) + if self.strict: + self.error("junk characters in start tag: %r" + % (rawdata[k:endpos][:20],)) + self.handle_data(rawdata[i:endpos]) + return endpos + if end.endswith('/>'): + # XHTML-style empty tag: + self.handle_startendtag(tag, attrs) + else: + self.handle_starttag(tag, attrs) + if tag in self.CDATA_CONTENT_ELEMENTS: + self.set_cdata_mode(tag) + return endpos + + def set_cdata_mode(self, elem): + self.cdata_elem = elem.lower() + self.interesting = re.compile(r'' % self.cdata_elem, re.I) + + BeautifulSoupHTMLParser.parse_starttag = parse_starttag + BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode + + CONSTRUCTOR_TAKES_STRICT = True diff --git a/sbsheriff/Lib/site-packages/bs4/builder/_lxml.py b/sbsheriff/Lib/site-packages/bs4/builder/_lxml.py new file mode 100644 index 0000000..971c81e --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4/builder/_lxml.py @@ -0,0 +1,386 @@ +# Use of this source code is governed by the MIT license. 
+__license__ = "MIT" + +__all__ = [ + 'LXMLTreeBuilderForXML', + 'LXMLTreeBuilder', + ] + +try: + from collections.abc import Callable # Python 3.6 +except ImportError as e: + from collections import Callable + +from io import BytesIO +from io import StringIO +from lxml import etree +from bs4.element import ( + Comment, + Doctype, + NamespacedAttribute, + ProcessingInstruction, + XMLProcessingInstruction, +) +from bs4.builder import ( + DetectsXMLParsedAsHTML, + FAST, + HTML, + HTMLTreeBuilder, + PERMISSIVE, + ParserRejectedMarkup, + TreeBuilder, + XML) +from bs4.dammit import EncodingDetector + +LXML = 'lxml' + +def _invert(d): + "Invert a dictionary." + return dict((v,k) for k, v in list(d.items())) + +class LXMLTreeBuilderForXML(TreeBuilder): + DEFAULT_PARSER_CLASS = etree.XMLParser + + is_xml = True + processing_instruction_class = XMLProcessingInstruction + + NAME = "lxml-xml" + ALTERNATE_NAMES = ["xml"] + + # Well, it's permissive by XML parser standards. + features = [NAME, LXML, XML, FAST, PERMISSIVE] + + CHUNK_SIZE = 512 + + # This namespace mapping is specified in the XML Namespace + # standard. + DEFAULT_NSMAPS = dict(xml='http://www.w3.org/XML/1998/namespace') + + DEFAULT_NSMAPS_INVERTED = _invert(DEFAULT_NSMAPS) + + # NOTE: If we parsed Element objects and looked at .sourceline, + # we'd be able to see the line numbers from the original document. + # But instead we build an XMLParser or HTMLParser object to serve + # as the target of parse messages, and those messages don't include + # line numbers. + # See: https://bugs.launchpad.net/lxml/+bug/1846906 + + def initialize_soup(self, soup): + """Let the BeautifulSoup object know about the standard namespace + mapping. + + :param soup: A `BeautifulSoup`. + """ + super(LXMLTreeBuilderForXML, self).initialize_soup(soup) + self._register_namespaces(self.DEFAULT_NSMAPS) + + def _register_namespaces(self, mapping): + """Let the BeautifulSoup object know about namespaces encountered + while parsing the document. + + This might be useful later on when creating CSS selectors. + + This will track (almost) all namespaces, even ones that were + only in scope for part of the document. If two namespaces have + the same prefix, only the first one encountered will be + tracked. Un-prefixed namespaces are not tracked. + + :param mapping: A dictionary mapping namespace prefixes to URIs. + """ + for key, value in list(mapping.items()): + # This is 'if key' and not 'if key is not None' because we + # don't track un-prefixed namespaces. Soupselect will + # treat an un-prefixed namespace as the default, which + # causes confusion in some cases. + if key and key not in self.soup._namespaces: + # Let the BeautifulSoup object know about a new namespace. + # If there are multiple namespaces defined with the same + # prefix, the first one in the document takes precedence. + self.soup._namespaces[key] = value + + def default_parser(self, encoding): + """Find the default parser for the given encoding. + + :param encoding: A string. + :return: Either a parser object or a class, which + will be instantiated with default arguments. + """ + if self._default_parser is not None: + return self._default_parser + return etree.XMLParser( + target=self, strip_cdata=False, recover=True, encoding=encoding) + + def parser_for(self, encoding): + """Instantiate an appropriate parser for the given encoding. + + :param encoding: A string. + :return: A parser object such as an `etree.XMLParser`. + """ + # Use the default parser. 
+ parser = self.default_parser(encoding) + + if isinstance(parser, Callable): + # Instantiate the parser with default arguments + parser = parser( + target=self, strip_cdata=False, recover=True, encoding=encoding + ) + return parser + + def __init__(self, parser=None, empty_element_tags=None, **kwargs): + # TODO: Issue a warning if parser is present but not a + # callable, since that means there's no way to create new + # parsers for different encodings. + self._default_parser = parser + if empty_element_tags is not None: + self.empty_element_tags = set(empty_element_tags) + self.soup = None + self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED] + self.active_namespace_prefixes = [dict(self.DEFAULT_NSMAPS)] + super(LXMLTreeBuilderForXML, self).__init__(**kwargs) + + def _getNsTag(self, tag): + # Split the namespace URL out of a fully-qualified lxml tag + # name. Copied from lxml's src/lxml/sax.py. + if tag[0] == '{': + return tuple(tag[1:].split('}', 1)) + else: + return (None, tag) + + def prepare_markup(self, markup, user_specified_encoding=None, + exclude_encodings=None, + document_declared_encoding=None): + """Run any preliminary steps necessary to make incoming markup + acceptable to the parser. + + lxml really wants to get a bytestring and convert it to + Unicode itself. So instead of using UnicodeDammit to convert + the bytestring to Unicode using different encodings, this + implementation uses EncodingDetector to iterate over the + encodings, and tell lxml to try to parse the document as each + one in turn. + + :param markup: Some markup -- hopefully a bytestring. + :param user_specified_encoding: The user asked to try this encoding. + :param document_declared_encoding: The markup itself claims to be + in this encoding. + :param exclude_encodings: The user asked _not_ to try any of + these encodings. + + :yield: A series of 4-tuples: + (markup, encoding, declared encoding, + has undergone character replacement) + + Each 4-tuple represents a strategy for converting the + document to Unicode and parsing it. Each strategy will be tried + in turn. + """ + is_html = not self.is_xml + if is_html: + self.processing_instruction_class = ProcessingInstruction + # We're in HTML mode, so if we're given XML, that's worth + # noting. + DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(markup) + else: + self.processing_instruction_class = XMLProcessingInstruction + + if isinstance(markup, str): + # We were given Unicode. Maybe lxml can parse Unicode on + # this system? + + # TODO: This is a workaround for + # https://bugs.launchpad.net/lxml/+bug/1948551. + # We can remove it once the upstream issue is fixed. + if len(markup) > 0 and markup[0] == u'\N{BYTE ORDER MARK}': + markup = markup[1:] + yield markup, None, document_declared_encoding, False + + if isinstance(markup, str): + # No, apparently not. Convert the Unicode to UTF-8 and + # tell lxml to parse it as UTF-8. + yield (markup.encode("utf8"), "utf8", + document_declared_encoding, False) + + # This was provided by the end-user; treat it as a known + # definite encoding per the algorithm laid out in the HTML5 + # spec. (See the EncodingDetector class for details.) + known_definite_encodings = [user_specified_encoding] + + # This was found in the document; treat it as a slightly lower-priority + # user encoding. 
+        user_encodings = [document_declared_encoding]
+        detector = EncodingDetector(
+            markup, known_definite_encodings=known_definite_encodings,
+            user_encodings=user_encodings, is_html=is_html,
+            exclude_encodings=exclude_encodings
+        )
+        for encoding in detector.encodings:
+            yield (detector.markup, encoding, document_declared_encoding, False)
+
+    def feed(self, markup):
+        if isinstance(markup, bytes):
+            markup = BytesIO(markup)
+        elif isinstance(markup, str):
+            markup = StringIO(markup)
+
+        # Call feed() at least once, even if the markup is empty,
+        # or the parser won't be initialized.
+        data = markup.read(self.CHUNK_SIZE)
+        try:
+            self.parser = self.parser_for(self.soup.original_encoding)
+            self.parser.feed(data)
+            while len(data) != 0:
+                # Now call feed() on the rest of the data, chunk by chunk.
+                data = markup.read(self.CHUNK_SIZE)
+                if len(data) != 0:
+                    self.parser.feed(data)
+            self.parser.close()
+        except (UnicodeDecodeError, LookupError, etree.ParserError) as e:
+            raise ParserRejectedMarkup(e)
+
+    def close(self):
+        self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED]
+
+    def start(self, name, attrs, nsmap={}):
+        # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
+        attrs = dict(attrs)
+        nsprefix = None
+        # Invert each namespace map as it comes in.
+        if len(nsmap) == 0 and len(self.nsmaps) > 1:
+            # There are no new namespaces for this tag, but
+            # non-default namespaces are in play, so we need a
+            # separate tag stack to know when they end.
+            self.nsmaps.append(None)
+        elif len(nsmap) > 0:
+            # A new namespace mapping has come into play.
+
+            # First, let the BeautifulSoup object know about it.
+            self._register_namespaces(nsmap)
+
+            # Then, add it to our running list of inverted namespace
+            # mappings.
+            self.nsmaps.append(_invert(nsmap))
+
+            # The currently active namespace prefixes have
+            # changed. Calculate the new mapping so it can be stored
+            # with all Tag objects created while these prefixes are in
+            # scope.
+            current_mapping = dict(self.active_namespace_prefixes[-1])
+            current_mapping.update(nsmap)
+
+            # We should not track un-prefixed namespaces as we can only hold one
+            # and it will be recognized as the default namespace by soupsieve,
+            # which may be confusing in some situations.
+            if '' in current_mapping:
+                del current_mapping['']
+            self.active_namespace_prefixes.append(current_mapping)
+
+            # Also treat the namespace mapping as a set of attributes on the
+            # tag, so we can recreate it later.
+            attrs = attrs.copy()
+            for prefix, namespace in list(nsmap.items()):
+                attribute = NamespacedAttribute(
+                    "xmlns", prefix, "http://www.w3.org/2000/xmlns/")
+                attrs[attribute] = namespace
+
+        # Namespaces are in play. Find any attributes that came in
+        # from lxml with namespaces attached to their names, and
+        # turn them into NamespacedAttribute objects.
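+        # lxml reports namespaced attribute names in Clark notation,
+        # e.g. '{http://www.w3.org/XML/1998/namespace}lang'; _getNsTag
+        # splits that into (namespace URI, 'lang'), and the currently
+        # active prefix for that URI (here, 'xml') becomes part of the
+        # NamespacedAttribute.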
+        new_attrs = {}
+        for attr, value in list(attrs.items()):
+            namespace, attr = self._getNsTag(attr)
+            if namespace is None:
+                new_attrs[attr] = value
+            else:
+                nsprefix = self._prefix_for_namespace(namespace)
+                attr = NamespacedAttribute(nsprefix, attr, namespace)
+                new_attrs[attr] = value
+        attrs = new_attrs
+
+        namespace, name = self._getNsTag(name)
+        nsprefix = self._prefix_for_namespace(namespace)
+        self.soup.handle_starttag(
+            name, namespace, nsprefix, attrs,
+            namespaces=self.active_namespace_prefixes[-1]
+        )
+
+    def _prefix_for_namespace(self, namespace):
+        """Find the currently active prefix for the given namespace."""
+        if namespace is None:
+            return None
+        for inverted_nsmap in reversed(self.nsmaps):
+            if inverted_nsmap is not None and namespace in inverted_nsmap:
+                return inverted_nsmap[namespace]
+        return None
+
+    def end(self, name):
+        self.soup.endData()
+        completed_tag = self.soup.tagStack[-1]
+        namespace, name = self._getNsTag(name)
+        nsprefix = None
+        if namespace is not None:
+            for inverted_nsmap in reversed(self.nsmaps):
+                if inverted_nsmap is not None and namespace in inverted_nsmap:
+                    nsprefix = inverted_nsmap[namespace]
+                    break
+        self.soup.handle_endtag(name, nsprefix)
+        if len(self.nsmaps) > 1:
+            # This tag, or one of its parents, introduced a namespace
+            # mapping, so pop it off the stack.
+            out_of_scope_nsmap = self.nsmaps.pop()
+
+            if out_of_scope_nsmap is not None:
+                # This tag introduced a namespace mapping which is no
+                # longer in scope. Recalculate the currently active
+                # namespace prefixes.
+                self.active_namespace_prefixes.pop()
+
+    def pi(self, target, data):
+        self.soup.endData()
+        data = target + ' ' + data
+        self.soup.handle_data(data)
+        self.soup.endData(self.processing_instruction_class)
+
+    def data(self, content):
+        self.soup.handle_data(content)
+
+    def doctype(self, name, pubid, system):
+        self.soup.endData()
+        doctype = Doctype.for_name_and_ids(name, pubid, system)
+        self.soup.object_was_parsed(doctype)
+
+    def comment(self, content):
+        "Handle comments as Comment objects."
+        self.soup.endData()
+        self.soup.handle_data(content)
+        self.soup.endData(Comment)
+
+    def test_fragment_to_document(self, fragment):
+        """See `TreeBuilder`."""
+        return '<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment
+
+
+class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):
+
+    NAME = LXML
+    ALTERNATE_NAMES = ["lxml-html"]
+
+    features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE]
+    is_xml = False
+    processing_instruction_class = ProcessingInstruction
+
+    def default_parser(self, encoding):
+        return etree.HTMLParser
+
+    def feed(self, markup):
+        encoding = self.soup.original_encoding
+        try:
+            self.parser = self.parser_for(encoding)
+            self.parser.feed(markup)
+            self.parser.close()
+        except (UnicodeDecodeError, LookupError, etree.ParserError) as e:
+            raise ParserRejectedMarkup(e)
+
+
+    def test_fragment_to_document(self, fragment):
+        """See `TreeBuilder`."""
+        return '<html><body>%s</body></html>' % fragment
diff --git a/sbsheriff/Lib/site-packages/bs4/dammit.py b/sbsheriff/Lib/site-packages/bs4/dammit.py
new file mode 100644
index 0000000..692433c
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/dammit.py
@@ -0,0 +1,1095 @@
+# -*- coding: utf-8 -*-
+"""Beautiful Soup bonus library: Unicode, Dammit
+
+This library converts a bytestream to Unicode through any means
+necessary. It is heavily based on code from Mark Pilgrim's Universal
+Feed Parser. It works best on XML and HTML, but it does not rewrite the
+XML or HTML to reflect a new encoding; that's the tree builder's job.
+""" +# Use of this source code is governed by the MIT license. +__license__ = "MIT" + +from html.entities import codepoint2name +from collections import defaultdict +import codecs +import re +import logging +import string + +# Import a library to autodetect character encodings. We'll support +# any of a number of libraries that all support the same API: +# +# * cchardet +# * chardet +# * charset-normalizer +chardet_module = None +try: + # PyPI package: cchardet + import cchardet as chardet_module +except ImportError: + try: + # Debian package: python-chardet + # PyPI package: chardet + import chardet as chardet_module + except ImportError: + try: + # PyPI package: charset-normalizer + import charset_normalizer as chardet_module + except ImportError: + # No chardet available. + chardet_module = None + +if chardet_module: + def chardet_dammit(s): + if isinstance(s, str): + return None + return chardet_module.detect(s)['encoding'] +else: + def chardet_dammit(s): + return None + +# Build bytestring and Unicode versions of regular expressions for finding +# a declared encoding inside an XML or HTML document. +xml_encoding = '^\\s*<\\?.*encoding=[\'"](.*?)[\'"].*\\?>' +html_meta = '<\\s*meta[^>]+charset\\s*=\\s*["\']?([^>]*?)[ /;\'">]' +encoding_res = dict() +encoding_res[bytes] = { + 'html' : re.compile(html_meta.encode("ascii"), re.I), + 'xml' : re.compile(xml_encoding.encode("ascii"), re.I), +} +encoding_res[str] = { + 'html' : re.compile(html_meta, re.I), + 'xml' : re.compile(xml_encoding, re.I) +} + +from html.entities import html5 + +class EntitySubstitution(object): + """The ability to substitute XML or HTML entities for certain characters.""" + + def _populate_class_variables(): + """Initialize variables used by this class to manage the plethora of + HTML5 named entities. + + This function returns a 3-tuple containing two dictionaries + and a regular expression: + + unicode_to_name - A mapping of Unicode strings like "⦨" to + entity names like "angmsdaa". When a single Unicode string has + multiple entity names, we try to choose the most commonly-used + name. + + name_to_unicode: A mapping of entity names like "angmsdaa" to + Unicode strings like "⦨". + + named_entity_re: A regular expression matching (almost) any + Unicode string that corresponds to an HTML5 named entity. + """ + unicode_to_name = {} + name_to_unicode = {} + + short_entities = set() + long_entities_by_first_character = defaultdict(set) + + for name_with_semicolon, character in sorted(html5.items()): + # "It is intentional, for legacy compatibility, that many + # code points have multiple character reference names. For + # example, some appear both with and without the trailing + # semicolon, or with different capitalizations." + # - https://html.spec.whatwg.org/multipage/named-characters.html#named-character-references + # + # The parsers are in charge of handling (or not) character + # references with no trailing semicolon, so we remove the + # semicolon whenever it appears. + if name_with_semicolon.endswith(';'): + name = name_with_semicolon[:-1] + else: + name = name_with_semicolon + + # When parsing HTML, we want to recognize any known named + # entity and convert it to a sequence of Unicode + # characters. + if name not in name_to_unicode: + name_to_unicode[name] = character + + # When _generating_ HTML, we want to recognize special + # character sequences that _could_ be converted to named + # entities. 
+            unicode_to_name[character] = name
+
+            # We also need to build a regular expression that lets us
+            # _find_ those characters in output strings so we can
+            # replace them.
+            #
+            # This is tricky, for two reasons.
+
+            if (len(character) == 1 and ord(character) < 128
+                and character not in '<>&'):
+                # First, it would be annoying to turn single ASCII
+                # characters like "|" into named entities like
+                # "&verbar;". The exceptions are <>&, which we _must_
+                # turn into named entities to produce valid HTML.
+                continue
+
+            if len(character) > 1 and all(ord(x) < 128 for x in character):
+                # We also do not want to turn _combinations_ of ASCII
+                # characters like 'fj' into named entities like
+                # '&fjlig;', though that's more debatable.
+                continue
+
+            # Second, some named entities have a Unicode value that's
+            # a subset of the Unicode value for some _other_ named
+            # entity. As an example, '\u2267' is &GreaterFullEqual;,
+            # but '\u2267\u0338' is &NotGreaterFullEqual;. Our regular
+            # expression needs to match the first two characters of
+            # "\u2267\u0338foo", but only the first character of
+            # "\u2267foo".
+            #
+            # In this step, we build two sets of characters that
+            # _eventually_ need to go into the regular expression. But
+            # we won't know exactly what the regular expression needs
+            # to look like until we've gone through the entire list of
+            # named entities.
+            if len(character) == 1:
+                short_entities.add(character)
+            else:
+                long_entities_by_first_character[character[0]].add(character)
+
+        # Now that we've been through the entire list of entities, we
+        # can create a regular expression that matches any of them.
+        particles = set()
+        for short in short_entities:
+            long_versions = long_entities_by_first_character[short]
+            if not long_versions:
+                particles.add(short)
+            else:
+                ignore = "".join([x[1] for x in long_versions])
+                # This finds, e.g. \u2267 but only if it is _not_
+                # followed by \u0338.
+                particles.add("%s(?![%s])" % (short, ignore))
+
+        for long_entities in list(long_entities_by_first_character.values()):
+            for long_entity in long_entities:
+                particles.add(long_entity)
+
+        re_definition = "(%s)" % "|".join(particles)
+
+        # If an entity shows up in both html5 and codepoint2name, it's
+        # likely that HTML5 gives it several different names, such as
+        # 'rsquo' and 'rsquor'. When converting Unicode characters to
+        # named entities, the codepoint2name name should take
+        # precedence where possible, since that's the more easily
+        # recognizable one.
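+        # For example, codepoint2name maps 0x2019 (RIGHT SINGLE
+        # QUOTATION MARK) to 'rsquo', which replaces the html5-derived
+        # name chosen above.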
+        for codepoint, name in list(codepoint2name.items()):
+            character = chr(codepoint)
+            unicode_to_name[character] = name
+
+        return unicode_to_name, name_to_unicode, re.compile(re_definition)
+    (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
+     CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()
+
+    CHARACTER_TO_XML_ENTITY = {
+        "'": "apos",
+        '"': "quot",
+        "&": "amp",
+        "<": "lt",
+        ">": "gt",
+    }
+
+    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
+                                           "&(?!#\\d+;|#x[0-9a-fA-F]+;|\\w+;)"
+                                           ")")
+
+    AMPERSAND_OR_BRACKET = re.compile("([<>&])")
+
+    @classmethod
+    def _substitute_html_entity(cls, matchobj):
+        """Used with a regular expression to substitute the
+        appropriate HTML entity for a special character string."""
+        entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
+        return "&%s;" % entity
+
+    @classmethod
+    def _substitute_xml_entity(cls, matchobj):
+        """Used with a regular expression to substitute the
+        appropriate XML entity for a special character string."""
+        entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
+        return "&%s;" % entity
+
+    @classmethod
+    def quoted_attribute_value(cls, value):
+        """Make a value into a quoted XML attribute, possibly escaping it.
+
+        Most strings will be quoted using double quotes.
+
+        Bob's Bar -> "Bob's Bar"
+
+        If a string contains double quotes, it will be quoted using
+        single quotes.
+
+        Welcome to "my bar" -> 'Welcome to "my bar"'
+
+        If a string contains both single and double quotes, the
+        double quotes will be escaped, and the string will be quoted
+        using double quotes.
+
+        Welcome to "Bob's Bar" -> "Welcome to &quot;Bob's bar&quot;"
+        """
+        quote_with = '"'
+        if '"' in value:
+            if "'" in value:
+                # The string contains both single and double
+                # quotes. Turn the double quotes into
+                # entities. We quote the double quotes rather than
+                # the single quotes because the entity name is
+                # "&quot;" whether this is HTML or XML. If we
+                # quoted the single quotes, we'd have to decide
+                # between &apos; and &squot;.
+                replace_with = "&quot;"
+                value = value.replace('"', replace_with)
+            else:
+                # There are double quotes but no single quotes.
+                # We can use single quotes to quote the attribute.
+                quote_with = "'"
+        return quote_with + value + quote_with
+
+    @classmethod
+    def substitute_xml(cls, value, make_quoted_attribute=False):
+        """Substitute XML entities for special XML characters.
+
+        :param value: A string to be substituted. The less-than sign
+          will become &lt;, the greater-than sign will become &gt;,
+          and any ampersands will become &amp;. If you want ampersands
+          that appear to be part of an entity definition to be left
+          alone, use substitute_xml_containing_entities() instead.
+
+        :param make_quoted_attribute: If True, then the string will be
+         quoted, as befits an attribute value.
+        """
+        # Escape angle brackets and ampersands.
+        value = cls.AMPERSAND_OR_BRACKET.sub(
+            cls._substitute_xml_entity, value)
+
+        if make_quoted_attribute:
+            value = cls.quoted_attribute_value(value)
+        return value
+
+    @classmethod
+    def substitute_xml_containing_entities(
+        cls, value, make_quoted_attribute=False):
+        """Substitute XML entities for special XML characters.
+
+        :param value: A string to be substituted. The less-than sign will
+          become &lt;, the greater-than sign will become &gt;, and any
+          ampersands that are not part of an entity definition will
+          become &amp;.
+
+        :param make_quoted_attribute: If True, then the string will be
+         quoted, as befits an attribute value.
+        """
+        # Escape angle brackets, and ampersands that aren't part of
+        # entities.
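+        # For example, 'AT&amp;T & <x>' becomes
+        # 'AT&amp;T &amp; &lt;x&gt;': the existing entity reference is
+        # left alone, while the bare ampersand and brackets are escaped.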
+        value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
+            cls._substitute_xml_entity, value)
+
+        if make_quoted_attribute:
+            value = cls.quoted_attribute_value(value)
+        return value
+
+    @classmethod
+    def substitute_html(cls, s):
+        """Replace certain Unicode characters with named HTML entities.
+
+        This differs from data.encode(encoding, 'xmlcharrefreplace')
+        in that the goal is to make the result more readable (to those
+        with ASCII displays) rather than to recover from
+        errors. There's absolutely nothing wrong with a UTF-8 string
+        containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
+        character with "&eacute;" will make it more readable to some
+        people.
+
+        :param s: A Unicode string.
+        """
+        return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
+            cls._substitute_html_entity, s)
+
+
+class EncodingDetector:
+    """Suggests a number of possible encodings for a bytestring.
+
+    Order of precedence:
+
+    1. Encodings you specifically tell EncodingDetector to try first
+    (the known_definite_encodings argument to the constructor).
+
+    2. An encoding determined by sniffing the document's byte-order mark.
+
+    3. Encodings you specifically tell EncodingDetector to try if
+    byte-order mark sniffing fails (the user_encodings argument to the
+    constructor).
+
+    4. An encoding declared within the bytestring itself, either in an
+    XML declaration (if the bytestring is to be interpreted as an XML
+    document), or in a <meta> tag (if the bytestring is to be
+    interpreted as an HTML document.)
+
+    5. An encoding detected through textual analysis by chardet,
+    cchardet, or a similar external library.
+
+    6. UTF-8.
+
+    7. Windows-1252.
+
+    """
+    def __init__(self, markup, known_definite_encodings=None,
+                 is_html=False, exclude_encodings=None,
+                 user_encodings=None, override_encodings=None):
+        """Constructor.
+
+        :param markup: Some markup in an unknown encoding.
+
+        :param known_definite_encodings: When determining the encoding
+            of `markup`, these encodings will be tried first, in
+            order. In HTML terms, this corresponds to the "known
+            definite encoding" step defined here:
+            https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding
+
+        :param user_encodings: These encodings will be tried after the
+            `known_definite_encodings` have been tried and failed, and
+            after an attempt to sniff the encoding by looking at a
+            byte order mark has failed. In HTML terms, this
+            corresponds to the step "user has explicitly instructed
+            the user agent to override the document's character
+            encoding", defined here:
+            https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding
+
+        :param override_encodings: A deprecated alias for
+            known_definite_encodings. Any encodings here will be tried
+            immediately after the encodings in
+            known_definite_encodings.
+
+        :param is_html: If True, this markup is considered to be
+            HTML. Otherwise it's assumed to be XML.
+
+        :param exclude_encodings: These encodings will not be tried,
+            even if they otherwise would be.
+
+        """
+        self.known_definite_encodings = list(known_definite_encodings or [])
+        if override_encodings:
+            self.known_definite_encodings += override_encodings
+        self.user_encodings = user_encodings or []
+        exclude_encodings = exclude_encodings or []
+        self.exclude_encodings = set([x.lower() for x in exclude_encodings])
+        self.chardet_encoding = None
+        self.is_html = is_html
+        self.declared_encoding = None
+
+        # First order of business: strip a byte-order mark.
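+        # For example, b'\xef\xbb\xbf<html/>' yields
+        # (b'<html/>', 'utf-8'); markup with no recognizable BOM is
+        # returned unchanged with an implied encoding of None.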
+        self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)
+
+    def _usable(self, encoding, tried):
+        """Should we even bother to try this encoding?
+
+        :param encoding: Name of an encoding.
+        :param tried: Encodings that have already been tried. This will be modified
+            as a side effect.
+        """
+        if encoding is not None:
+            encoding = encoding.lower()
+            if encoding in self.exclude_encodings:
+                return False
+            if encoding not in tried:
+                tried.add(encoding)
+                return True
+        return False
+
+    @property
+    def encodings(self):
+        """Yield a number of encodings that might work for this markup.
+
+        :yield: A sequence of strings.
+        """
+        tried = set()
+
+        # First, try the known definite encodings
+        for e in self.known_definite_encodings:
+            if self._usable(e, tried):
+                yield e
+
+        # Did the document originally start with a byte-order mark
+        # that indicated its encoding?
+        if self._usable(self.sniffed_encoding, tried):
+            yield self.sniffed_encoding
+
+        # Sniffing the byte-order mark did nothing; try the user
+        # encodings.
+        for e in self.user_encodings:
+            if self._usable(e, tried):
+                yield e
+
+        # Look within the document for an XML or HTML encoding
+        # declaration.
+        if self.declared_encoding is None:
+            self.declared_encoding = self.find_declared_encoding(
+                self.markup, self.is_html)
+        if self._usable(self.declared_encoding, tried):
+            yield self.declared_encoding
+
+        # Use third-party character set detection to guess at the
+        # encoding.
+        if self.chardet_encoding is None:
+            self.chardet_encoding = chardet_dammit(self.markup)
+        if self._usable(self.chardet_encoding, tried):
+            yield self.chardet_encoding
+
+        # As a last-ditch effort, try utf-8 and windows-1252.
+        for e in ('utf-8', 'windows-1252'):
+            if self._usable(e, tried):
+                yield e
+
+    @classmethod
+    def strip_byte_order_mark(cls, data):
+        """If a byte-order mark is present, strip it and return the encoding it implies.
+
+        :param data: Some markup.
+        :return: A 2-tuple (modified data, implied encoding)
+        """
+        encoding = None
+        if isinstance(data, str):
+            # Unicode data cannot have a byte-order mark.
+            return data, encoding
+        if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
+               and (data[2:4] != b'\x00\x00'):
+            encoding = 'utf-16be'
+            data = data[2:]
+        elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
+                 and (data[2:4] != b'\x00\x00'):
+            encoding = 'utf-16le'
+            data = data[2:]
+        elif data[:3] == b'\xef\xbb\xbf':
+            encoding = 'utf-8'
+            data = data[3:]
+        elif data[:4] == b'\x00\x00\xfe\xff':
+            encoding = 'utf-32be'
+            data = data[4:]
+        elif data[:4] == b'\xff\xfe\x00\x00':
+            encoding = 'utf-32le'
+            data = data[4:]
+        return data, encoding
+
+    @classmethod
+    def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
+        """Given a document, tries to find its declared encoding.
+
+        An XML encoding is declared at the beginning of the document.
+
+        An HTML encoding is declared in a <meta> tag, hopefully near the
+        beginning of the document.
+
+        :param markup: Some markup.
+        :param is_html: If True, this markup is considered to be HTML. Otherwise
+            it's assumed to be XML.
+        :param search_entire_document: Since an encoding is supposed to be declared near the beginning
+            of the document, most of the time it's only necessary to search a few kilobytes of data.
+            Set this to True to force this method to search the entire document.
+ """ + if search_entire_document: + xml_endpos = html_endpos = len(markup) + else: + xml_endpos = 1024 + html_endpos = max(2048, int(len(markup) * 0.05)) + + if isinstance(markup, bytes): + res = encoding_res[bytes] + else: + res = encoding_res[str] + + xml_re = res['xml'] + html_re = res['html'] + declared_encoding = None + declared_encoding_match = xml_re.search(markup, endpos=xml_endpos) + if not declared_encoding_match and is_html: + declared_encoding_match = html_re.search(markup, endpos=html_endpos) + if declared_encoding_match is not None: + declared_encoding = declared_encoding_match.groups()[0] + if declared_encoding: + if isinstance(declared_encoding, bytes): + declared_encoding = declared_encoding.decode('ascii', 'replace') + return declared_encoding.lower() + return None + +class UnicodeDammit: + """A class for detecting the encoding of a *ML document and + converting it to a Unicode string. If the source encoding is + windows-1252, can replace MS smart quotes with their HTML or XML + equivalents.""" + + # This dictionary maps commonly seen values for "charset" in HTML + # meta tags to the corresponding Python codec names. It only covers + # values that aren't in Python's aliases and can't be determined + # by the heuristics in find_codec. + CHARSET_ALIASES = {"macintosh": "mac-roman", + "x-sjis": "shift-jis"} + + ENCODINGS_WITH_SMART_QUOTES = [ + "windows-1252", + "iso-8859-1", + "iso-8859-2", + ] + + def __init__(self, markup, known_definite_encodings=[], + smart_quotes_to=None, is_html=False, exclude_encodings=[], + user_encodings=None, override_encodings=None + ): + """Constructor. + + :param markup: A bytestring representing markup in an unknown encoding. + + :param known_definite_encodings: When determining the encoding + of `markup`, these encodings will be tried first, in + order. In HTML terms, this corresponds to the "known + definite encoding" step defined here: + https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding + + :param user_encodings: These encodings will be tried after the + `known_definite_encodings` have been tried and failed, and + after an attempt to sniff the encoding by looking at a + byte order mark has failed. In HTML terms, this + corresponds to the step "user has explicitly instructed + the user agent to override the document's character + encoding", defined here: + https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding + + :param override_encodings: A deprecated alias for + known_definite_encodings. Any encodings here will be tried + immediately after the encodings in + known_definite_encodings. + + :param smart_quotes_to: By default, Microsoft smart quotes will, like all other characters, be converted + to Unicode characters. Setting this to 'ascii' will convert them to ASCII quotes instead. + Setting it to 'xml' will convert them to XML entity references, and setting it to 'html' + will convert them to HTML entity references. + :param is_html: If True, this markup is considered to be HTML. Otherwise + it's assumed to be XML. + :param exclude_encodings: These encodings will not be considered, even + if the sniffing code thinks they might make sense. 
+ + """ + self.smart_quotes_to = smart_quotes_to + self.tried_encodings = [] + self.contains_replacement_characters = False + self.is_html = is_html + self.log = logging.getLogger(__name__) + self.detector = EncodingDetector( + markup, known_definite_encodings, is_html, exclude_encodings, + user_encodings, override_encodings + ) + + # Short-circuit if the data is in Unicode to begin with. + if isinstance(markup, str) or markup == '': + self.markup = markup + self.unicode_markup = str(markup) + self.original_encoding = None + return + + # The encoding detector may have stripped a byte-order mark. + # Use the stripped markup from this point on. + self.markup = self.detector.markup + + u = None + for encoding in self.detector.encodings: + markup = self.detector.markup + u = self._convert_from(encoding) + if u is not None: + break + + if not u: + # None of the encodings worked. As an absolute last resort, + # try them again with character replacement. + + for encoding in self.detector.encodings: + if encoding != "ascii": + u = self._convert_from(encoding, "replace") + if u is not None: + self.log.warning( + "Some characters could not be decoded, and were " + "replaced with REPLACEMENT CHARACTER." + ) + self.contains_replacement_characters = True + break + + # If none of that worked, we could at this point force it to + # ASCII, but that would destroy so much data that I think + # giving up is better. + self.unicode_markup = u + if not u: + self.original_encoding = None + + def _sub_ms_char(self, match): + """Changes a MS smart quote character to an XML or HTML + entity, or an ASCII character.""" + orig = match.group(1) + if self.smart_quotes_to == 'ascii': + sub = self.MS_CHARS_TO_ASCII.get(orig).encode() + else: + sub = self.MS_CHARS.get(orig) + if type(sub) == tuple: + if self.smart_quotes_to == 'xml': + sub = '&#x'.encode() + sub[1].encode() + ';'.encode() + else: + sub = '&'.encode() + sub[0].encode() + ';'.encode() + else: + sub = sub.encode() + return sub + + def _convert_from(self, proposed, errors="strict"): + """Attempt to convert the markup to the proposed encoding. + + :param proposed: The name of a character encoding. + """ + proposed = self.find_codec(proposed) + if not proposed or (proposed, errors) in self.tried_encodings: + return None + self.tried_encodings.append((proposed, errors)) + markup = self.markup + # Convert smart quotes to HTML if coming from an encoding + # that might have them. + if (self.smart_quotes_to is not None + and proposed in self.ENCODINGS_WITH_SMART_QUOTES): + smart_quotes_re = b"([\x80-\x9f])" + smart_quotes_compiled = re.compile(smart_quotes_re) + markup = smart_quotes_compiled.sub(self._sub_ms_char, markup) + + try: + #print("Trying to convert document to %s (errors=%s)" % ( + # proposed, errors)) + u = self._to_unicode(markup, proposed, errors) + self.markup = u + self.original_encoding = proposed + except Exception as e: + #print("That didn't work!") + #print(e) + return None + #print("Correct encoding: %s" % proposed) + return self.markup + + def _to_unicode(self, data, encoding, errors="strict"): + """Given a string and its encoding, decodes the string into Unicode. + + :param encoding: The name of an encoding. + """ + return str(data, encoding, errors) + + @property + def declared_html_encoding(self): + """If the markup is an HTML document, returns the encoding declared _within_ + the document. 
+ """ + if not self.is_html: + return None + return self.detector.declared_encoding + + def find_codec(self, charset): + """Convert the name of a character set to a codec name. + + :param charset: The name of a character set. + :return: The name of a codec. + """ + value = (self._codec(self.CHARSET_ALIASES.get(charset, charset)) + or (charset and self._codec(charset.replace("-", ""))) + or (charset and self._codec(charset.replace("-", "_"))) + or (charset and charset.lower()) + or charset + ) + if value: + return value.lower() + return None + + def _codec(self, charset): + if not charset: + return charset + codec = None + try: + codecs.lookup(charset) + codec = charset + except (LookupError, ValueError): + pass + return codec + + + # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities. + MS_CHARS = {b'\x80': ('euro', '20AC'), + b'\x81': ' ', + b'\x82': ('sbquo', '201A'), + b'\x83': ('fnof', '192'), + b'\x84': ('bdquo', '201E'), + b'\x85': ('hellip', '2026'), + b'\x86': ('dagger', '2020'), + b'\x87': ('Dagger', '2021'), + b'\x88': ('circ', '2C6'), + b'\x89': ('permil', '2030'), + b'\x8A': ('Scaron', '160'), + b'\x8B': ('lsaquo', '2039'), + b'\x8C': ('OElig', '152'), + b'\x8D': '?', + b'\x8E': ('#x17D', '17D'), + b'\x8F': '?', + b'\x90': '?', + b'\x91': ('lsquo', '2018'), + b'\x92': ('rsquo', '2019'), + b'\x93': ('ldquo', '201C'), + b'\x94': ('rdquo', '201D'), + b'\x95': ('bull', '2022'), + b'\x96': ('ndash', '2013'), + b'\x97': ('mdash', '2014'), + b'\x98': ('tilde', '2DC'), + b'\x99': ('trade', '2122'), + b'\x9a': ('scaron', '161'), + b'\x9b': ('rsaquo', '203A'), + b'\x9c': ('oelig', '153'), + b'\x9d': '?', + b'\x9e': ('#x17E', '17E'), + b'\x9f': ('Yuml', ''),} + + # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains + # horrors like stripping diacritical marks to turn á into a, but also + # contains non-horrors like turning “ into ". + MS_CHARS_TO_ASCII = { + b'\x80' : 'EUR', + b'\x81' : ' ', + b'\x82' : ',', + b'\x83' : 'f', + b'\x84' : ',,', + b'\x85' : '...', + b'\x86' : '+', + b'\x87' : '++', + b'\x88' : '^', + b'\x89' : '%', + b'\x8a' : 'S', + b'\x8b' : '<', + b'\x8c' : 'OE', + b'\x8d' : '?', + b'\x8e' : 'Z', + b'\x8f' : '?', + b'\x90' : '?', + b'\x91' : "'", + b'\x92' : "'", + b'\x93' : '"', + b'\x94' : '"', + b'\x95' : '*', + b'\x96' : '-', + b'\x97' : '--', + b'\x98' : '~', + b'\x99' : '(TM)', + b'\x9a' : 's', + b'\x9b' : '>', + b'\x9c' : 'oe', + b'\x9d' : '?', + b'\x9e' : 'z', + b'\x9f' : 'Y', + b'\xa0' : ' ', + b'\xa1' : '!', + b'\xa2' : 'c', + b'\xa3' : 'GBP', + b'\xa4' : '$', #This approximation is especially parochial--this is the + #generic currency symbol. 
+ b'\xa5' : 'YEN', + b'\xa6' : '|', + b'\xa7' : 'S', + b'\xa8' : '..', + b'\xa9' : '', + b'\xaa' : '(th)', + b'\xab' : '<<', + b'\xac' : '!', + b'\xad' : ' ', + b'\xae' : '(R)', + b'\xaf' : '-', + b'\xb0' : 'o', + b'\xb1' : '+-', + b'\xb2' : '2', + b'\xb3' : '3', + b'\xb4' : ("'", 'acute'), + b'\xb5' : 'u', + b'\xb6' : 'P', + b'\xb7' : '*', + b'\xb8' : ',', + b'\xb9' : '1', + b'\xba' : '(th)', + b'\xbb' : '>>', + b'\xbc' : '1/4', + b'\xbd' : '1/2', + b'\xbe' : '3/4', + b'\xbf' : '?', + b'\xc0' : 'A', + b'\xc1' : 'A', + b'\xc2' : 'A', + b'\xc3' : 'A', + b'\xc4' : 'A', + b'\xc5' : 'A', + b'\xc6' : 'AE', + b'\xc7' : 'C', + b'\xc8' : 'E', + b'\xc9' : 'E', + b'\xca' : 'E', + b'\xcb' : 'E', + b'\xcc' : 'I', + b'\xcd' : 'I', + b'\xce' : 'I', + b'\xcf' : 'I', + b'\xd0' : 'D', + b'\xd1' : 'N', + b'\xd2' : 'O', + b'\xd3' : 'O', + b'\xd4' : 'O', + b'\xd5' : 'O', + b'\xd6' : 'O', + b'\xd7' : '*', + b'\xd8' : 'O', + b'\xd9' : 'U', + b'\xda' : 'U', + b'\xdb' : 'U', + b'\xdc' : 'U', + b'\xdd' : 'Y', + b'\xde' : 'b', + b'\xdf' : 'B', + b'\xe0' : 'a', + b'\xe1' : 'a', + b'\xe2' : 'a', + b'\xe3' : 'a', + b'\xe4' : 'a', + b'\xe5' : 'a', + b'\xe6' : 'ae', + b'\xe7' : 'c', + b'\xe8' : 'e', + b'\xe9' : 'e', + b'\xea' : 'e', + b'\xeb' : 'e', + b'\xec' : 'i', + b'\xed' : 'i', + b'\xee' : 'i', + b'\xef' : 'i', + b'\xf0' : 'o', + b'\xf1' : 'n', + b'\xf2' : 'o', + b'\xf3' : 'o', + b'\xf4' : 'o', + b'\xf5' : 'o', + b'\xf6' : 'o', + b'\xf7' : '/', + b'\xf8' : 'o', + b'\xf9' : 'u', + b'\xfa' : 'u', + b'\xfb' : 'u', + b'\xfc' : 'u', + b'\xfd' : 'y', + b'\xfe' : 'b', + b'\xff' : 'y', + } + + # A map used when removing rogue Windows-1252/ISO-8859-1 + # characters in otherwise UTF-8 documents. + # + # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in + # Windows-1252. + WINDOWS_1252_TO_UTF8 = { + 0x80 : b'\xe2\x82\xac', # € + 0x82 : b'\xe2\x80\x9a', # ‚ + 0x83 : b'\xc6\x92', # ƒ + 0x84 : b'\xe2\x80\x9e', # „ + 0x85 : b'\xe2\x80\xa6', # … + 0x86 : b'\xe2\x80\xa0', # † + 0x87 : b'\xe2\x80\xa1', # ‡ + 0x88 : b'\xcb\x86', # ˆ + 0x89 : b'\xe2\x80\xb0', # ‰ + 0x8a : b'\xc5\xa0', # Š + 0x8b : b'\xe2\x80\xb9', # ‹ + 0x8c : b'\xc5\x92', # Œ + 0x8e : b'\xc5\xbd', # Ž + 0x91 : b'\xe2\x80\x98', # ‘ + 0x92 : b'\xe2\x80\x99', # ’ + 0x93 : b'\xe2\x80\x9c', # “ + 0x94 : b'\xe2\x80\x9d', # ” + 0x95 : b'\xe2\x80\xa2', # • + 0x96 : b'\xe2\x80\x93', # – + 0x97 : b'\xe2\x80\x94', # — + 0x98 : b'\xcb\x9c', # ˜ + 0x99 : b'\xe2\x84\xa2', # ™ + 0x9a : b'\xc5\xa1', # š + 0x9b : b'\xe2\x80\xba', # › + 0x9c : b'\xc5\x93', # œ + 0x9e : b'\xc5\xbe', # ž + 0x9f : b'\xc5\xb8', # Ÿ + 0xa0 : b'\xc2\xa0', #   + 0xa1 : b'\xc2\xa1', # ¡ + 0xa2 : b'\xc2\xa2', # ¢ + 0xa3 : b'\xc2\xa3', # £ + 0xa4 : b'\xc2\xa4', # ¤ + 0xa5 : b'\xc2\xa5', # ¥ + 0xa6 : b'\xc2\xa6', # ¦ + 0xa7 : b'\xc2\xa7', # § + 0xa8 : b'\xc2\xa8', # ¨ + 0xa9 : b'\xc2\xa9', # © + 0xaa : b'\xc2\xaa', # ª + 0xab : b'\xc2\xab', # « + 0xac : b'\xc2\xac', # ¬ + 0xad : b'\xc2\xad', # ­ + 0xae : b'\xc2\xae', # ® + 0xaf : b'\xc2\xaf', # ¯ + 0xb0 : b'\xc2\xb0', # ° + 0xb1 : b'\xc2\xb1', # ± + 0xb2 : b'\xc2\xb2', # ² + 0xb3 : b'\xc2\xb3', # ³ + 0xb4 : b'\xc2\xb4', # ´ + 0xb5 : b'\xc2\xb5', # µ + 0xb6 : b'\xc2\xb6', # ¶ + 0xb7 : b'\xc2\xb7', # · + 0xb8 : b'\xc2\xb8', # ¸ + 0xb9 : b'\xc2\xb9', # ¹ + 0xba : b'\xc2\xba', # º + 0xbb : b'\xc2\xbb', # » + 0xbc : b'\xc2\xbc', # ¼ + 0xbd : b'\xc2\xbd', # ½ + 0xbe : b'\xc2\xbe', # ¾ + 0xbf : b'\xc2\xbf', # ¿ + 0xc0 : b'\xc3\x80', # À + 0xc1 : b'\xc3\x81', # Á + 0xc2 : b'\xc3\x82', #  + 0xc3 : b'\xc3\x83', # à + 0xc4 : b'\xc3\x84', # Ä + 0xc5 : b'\xc3\x85', 
# Å
+        0xc6 : b'\xc3\x86', # Æ
+        0xc7 : b'\xc3\x87', # Ç
+        0xc8 : b'\xc3\x88', # È
+        0xc9 : b'\xc3\x89', # É
+        0xca : b'\xc3\x8a', # Ê
+        0xcb : b'\xc3\x8b', # Ë
+        0xcc : b'\xc3\x8c', # Ì
+        0xcd : b'\xc3\x8d', # Í
+        0xce : b'\xc3\x8e', # Î
+        0xcf : b'\xc3\x8f', # Ï
+        0xd0 : b'\xc3\x90', # Ð
+        0xd1 : b'\xc3\x91', # Ñ
+        0xd2 : b'\xc3\x92', # Ò
+        0xd3 : b'\xc3\x93', # Ó
+        0xd4 : b'\xc3\x94', # Ô
+        0xd5 : b'\xc3\x95', # Õ
+        0xd6 : b'\xc3\x96', # Ö
+        0xd7 : b'\xc3\x97', # ×
+        0xd8 : b'\xc3\x98', # Ø
+        0xd9 : b'\xc3\x99', # Ù
+        0xda : b'\xc3\x9a', # Ú
+        0xdb : b'\xc3\x9b', # Û
+        0xdc : b'\xc3\x9c', # Ü
+        0xdd : b'\xc3\x9d', # Ý
+        0xde : b'\xc3\x9e', # Þ
+        0xdf : b'\xc3\x9f', # ß
+        0xe0 : b'\xc3\xa0', # à
+        0xe1 : b'\xc3\xa1', # á
+        0xe2 : b'\xc3\xa2', # â
+        0xe3 : b'\xc3\xa3', # ã
+        0xe4 : b'\xc3\xa4', # ä
+        0xe5 : b'\xc3\xa5', # å
+        0xe6 : b'\xc3\xa6', # æ
+        0xe7 : b'\xc3\xa7', # ç
+        0xe8 : b'\xc3\xa8', # è
+        0xe9 : b'\xc3\xa9', # é
+        0xea : b'\xc3\xaa', # ê
+        0xeb : b'\xc3\xab', # ë
+        0xec : b'\xc3\xac', # ì
+        0xed : b'\xc3\xad', # í
+        0xee : b'\xc3\xae', # î
+        0xef : b'\xc3\xaf', # ï
+        0xf0 : b'\xc3\xb0', # ð
+        0xf1 : b'\xc3\xb1', # ñ
+        0xf2 : b'\xc3\xb2', # ò
+        0xf3 : b'\xc3\xb3', # ó
+        0xf4 : b'\xc3\xb4', # ô
+        0xf5 : b'\xc3\xb5', # õ
+        0xf6 : b'\xc3\xb6', # ö
+        0xf7 : b'\xc3\xb7', # ÷
+        0xf8 : b'\xc3\xb8', # ø
+        0xf9 : b'\xc3\xb9', # ù
+        0xfa : b'\xc3\xba', # ú
+        0xfb : b'\xc3\xbb', # û
+        0xfc : b'\xc3\xbc', # ü
+        0xfd : b'\xc3\xbd', # ý
+        0xfe : b'\xc3\xbe', # þ
+    }
+
+    MULTIBYTE_MARKERS_AND_SIZES = [
+        (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
+        (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
+        (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
+    ]
+
+    FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
+    LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]
+
+    @classmethod
+    def detwingle(cls, in_bytes, main_encoding="utf8",
+                  embedded_encoding="windows-1252"):
+        """Fix characters from one encoding embedded in some other encoding.
+
+        Currently the only situation supported is Windows-1252 (or its
+        subset ISO-8859-1), embedded in UTF-8.
+
+        :param in_bytes: A bytestring that you suspect contains
+            characters from multiple encodings. Note that this _must_
+            be a bytestring. If you've already converted the document
+            to Unicode, you're too late.
+        :param main_encoding: The primary encoding of `in_bytes`.
+        :param embedded_encoding: The encoding that was used to embed characters
+            in the main document.
+        :return: A bytestring in which `embedded_encoding`
+            characters have been converted to their `main_encoding`
+            equivalents.
+        """
+        if embedded_encoding.replace('_', '-').lower() not in (
+            'windows-1252', 'windows_1252'):
+            raise NotImplementedError(
+                "Windows-1252 and ISO-8859-1 are the only currently supported "
+                "embedded encodings.")
+
+        if main_encoding.lower() not in ('utf8', 'utf-8'):
+            raise NotImplementedError(
+                "UTF-8 is the only currently supported main encoding.")
+
+        byte_chunks = []
+
+        chunk_start = 0
+        pos = 0
+        while pos < len(in_bytes):
+            byte = in_bytes[pos]
+            if not isinstance(byte, int):
+                # Python 2.x
+                byte = ord(byte)
+            if (byte >= cls.FIRST_MULTIBYTE_MARKER
+                and byte <= cls.LAST_MULTIBYTE_MARKER):
+                # This is the start of a UTF-8 multibyte character. Skip
+                # to the end.
+                for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
+                    if byte >= start and byte <= end:
+                        pos += size
+                        break
+            elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
+                # We found a Windows-1252 character!
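+                # For example, byte 0x93 is a Windows-1252 left double
+                # quotation mark; it is replaced below with
+                # b'\xe2\x80\x9c', the UTF-8 encoding of U+201C.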
+ # Save the string up to this point as a chunk. + byte_chunks.append(in_bytes[chunk_start:pos]) + + # Now translate the Windows-1252 character into UTF-8 + # and add it as another, one-byte chunk. + byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte]) + pos += 1 + chunk_start = pos + else: + # Go on to the next character. + pos += 1 + if chunk_start == 0: + # The string is unchanged. + return in_bytes + else: + # Store the final chunk. + byte_chunks.append(in_bytes[chunk_start:]) + return b''.join(byte_chunks) + diff --git a/sbsheriff/Lib/site-packages/bs4/diagnose.py b/sbsheriff/Lib/site-packages/bs4/diagnose.py new file mode 100644 index 0000000..3bf583f --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4/diagnose.py @@ -0,0 +1,248 @@ +"""Diagnostic functions, mainly for use when doing tech support.""" + +# Use of this source code is governed by the MIT license. +__license__ = "MIT" + +import cProfile +from io import BytesIO +from html.parser import HTMLParser +import bs4 +from bs4 import BeautifulSoup, __version__ +from bs4.builder import builder_registry + +import os +import pstats +import random +import tempfile +import time +import traceback +import sys +import cProfile + +def diagnose(data): + """Diagnostic suite for isolating common problems. + + :param data: A string containing markup that needs to be explained. + :return: None; diagnostics are printed to standard output. + """ + print(("Diagnostic running on Beautiful Soup %s" % __version__)) + print(("Python version %s" % sys.version)) + + basic_parsers = ["html.parser", "html5lib", "lxml"] + for name in basic_parsers: + for builder in builder_registry.builders: + if name in builder.features: + break + else: + basic_parsers.remove(name) + print(( + "I noticed that %s is not installed. Installing it may help." % + name)) + + if 'lxml' in basic_parsers: + basic_parsers.append("lxml-xml") + try: + from lxml import etree + print(("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION)))) + except ImportError as e: + print( + "lxml is not installed or couldn't be imported.") + + + if 'html5lib' in basic_parsers: + try: + import html5lib + print(("Found html5lib version %s" % html5lib.__version__)) + except ImportError as e: + print( + "html5lib is not installed or couldn't be imported.") + + if hasattr(data, 'read'): + data = data.read() + elif data.startswith("http:") or data.startswith("https:"): + print(('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data)) + print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.") + return + else: + try: + if os.path.exists(data): + print(('"%s" looks like a filename. Reading data from the file.' % data)) + with open(data) as fp: + data = fp.read() + except ValueError: + # This can happen on some platforms when the 'filename' is + # too long. Assume it's data and not a filename. + pass + print("") + + for parser in basic_parsers: + print(("Trying to parse your markup with %s" % parser)) + success = False + try: + soup = BeautifulSoup(data, features=parser) + success = True + except Exception as e: + print(("%s could not parse the markup." % parser)) + traceback.print_exc() + if success: + print(("Here's what %s did with the markup:" % parser)) + print((soup.prettify())) + + print(("-" * 80)) + +def lxml_trace(data, html=True, **kwargs): + """Print out the lxml events that occur during parsing. + + This lets you see how lxml parses a document when no Beautiful + Soup code is running. 
You can use this to determine whether
+    an lxml-specific problem is in Beautiful Soup's lxml tree builders
+    or in lxml itself.
+
+    :param data: Some markup.
+    :param html: If True, markup will be parsed with lxml's HTML parser.
+       If False, lxml's XML parser will be used.
+    """
+    from lxml import etree
+    recover = kwargs.pop('recover', True)
+    if isinstance(data, str):
+        data = data.encode("utf8")
+    reader = BytesIO(data)
+    for event, element in etree.iterparse(
+        reader, html=html, recover=recover, **kwargs
+    ):
+        print(("%s, %4s, %s" % (event, element.tag, element.text)))

class AnnouncingParser(HTMLParser):
+    """Subclass of HTMLParser that announces parse events, without doing
+    anything else.
+
+    You can use this to get a picture of how html.parser sees a given
+    document. The easiest way to do this is to call `htmlparser_trace`.
+    """
+
+    def _p(self, s):
+        print(s)
+
+    def handle_starttag(self, name, attrs):
+        self._p("%s START" % name)
+
+    def handle_endtag(self, name):
+        self._p("%s END" % name)
+
+    def handle_data(self, data):
+        self._p("%s DATA" % data)
+
+    def handle_charref(self, name):
+        self._p("%s CHARREF" % name)
+
+    def handle_entityref(self, name):
+        self._p("%s ENTITYREF" % name)
+
+    def handle_comment(self, data):
+        self._p("%s COMMENT" % data)
+
+    def handle_decl(self, data):
+        self._p("%s DECL" % data)
+
+    def unknown_decl(self, data):
+        self._p("%s UNKNOWN-DECL" % data)
+
+    def handle_pi(self, data):
+        self._p("%s PI" % data)
+
+def htmlparser_trace(data):
+    """Print out the HTMLParser events that occur during parsing.
+
+    This lets you see how HTMLParser parses a document when no
+    Beautiful Soup code is running.
+
+    :param data: Some markup.
+    """
+    parser = AnnouncingParser()
+    parser.feed(data)
+
+_vowels = "aeiou"
+_consonants = "bcdfghjklmnpqrstvwxyz"
+
+def rword(length=5):
+    "Generate a random word-like string."
+    s = ''
+    for i in range(length):
+        if i % 2 == 0:
+            t = _consonants
+        else:
+            t = _vowels
+        s += random.choice(t)
+    return s
+
+def rsentence(length=4):
+    "Generate a random sentence-like string."
+    return " ".join(rword(random.randint(4,9)) for i in range(length))
+
+def rdoc(num_elements=1000):
+    """Randomly generate an invalid HTML document."""
+    tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
+    elements = []
+    for i in range(num_elements):
+        choice = random.randint(0,3)
+        if choice == 0:
+            # New tag.
+            tag_name = random.choice(tag_names)
+            elements.append("<%s>" % tag_name)
+        elif choice == 1:
+            elements.append(rsentence(random.randint(1,4)))
+        elif choice == 2:
+            # Close a tag.
+            tag_name = random.choice(tag_names)
+            elements.append("</%s>" % tag_name)
+    return "<html>" + "\n".join(elements) + "</html>"
+
+def benchmark_parsers(num_elements=100000):
+    """Very basic head-to-head performance benchmark."""
+    print(("Comparative parser benchmark on Beautiful Soup %s" % __version__))
+    data = rdoc(num_elements)
+    print(("Generated a large invalid HTML document (%d bytes)." % len(data)))
+
+    for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
+        success = False
+        try:
+            a = time.time()
+            soup = BeautifulSoup(data, parser)
+            b = time.time()
+            success = True
+        except Exception as e:
+            print(("%s could not parse the markup." % parser))
+            traceback.print_exc()
+        if success:
+            print(("BS4+%s parsed the markup in %.2fs." % (parser, b-a)))
+
+    from lxml import etree
+    a = time.time()
+    etree.HTML(data)
+    b = time.time()
+    print(("Raw lxml parsed the markup in %.2fs."
% (b-a)))
+
+    import html5lib
+    parser = html5lib.HTMLParser()
+    a = time.time()
+    parser.parse(data)
+    b = time.time()
+    print(("Raw html5lib parsed the markup in %.2fs." % (b-a)))
+
+def profile(num_elements=100000, parser="lxml"):
+    """Use Python's profiler on a randomly generated document."""
+    filehandle = tempfile.NamedTemporaryFile()
+    filename = filehandle.name
+
+    data = rdoc(num_elements)
+    vars = dict(bs4=bs4, data=data, parser=parser)
+    cProfile.runctx('bs4.BeautifulSoup(data, parser)', vars, vars, filename)
+
+    stats = pstats.Stats(filename)
+    # stats.strip_dirs()
+    stats.sort_stats("cumulative")
+    stats.print_stats('_html5lib|bs4', 50)
+
+# If this file is run as a script, standard input is diagnosed.
+if __name__ == '__main__':
+    diagnose(sys.stdin.read())
diff --git a/sbsheriff/Lib/site-packages/bs4/element.py b/sbsheriff/Lib/site-packages/bs4/element.py
new file mode 100644
index 0000000..74b1dc0
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/element.py
@@ -0,0 +1,2291 @@
+# Use of this source code is governed by the MIT license.
+__license__ = "MIT"
+
+try:
+    from collections.abc import Callable # Python 3.6
+except ImportError as e:
+    from collections import Callable
+import re
+import sys
+import warnings
+try:
+    import soupsieve
+except ImportError as e:
+    soupsieve = None
+    warnings.warn(
+        'The soupsieve package is not installed. CSS selectors cannot be used.'
+    )
+
+from bs4.formatter import (
+    Formatter,
+    HTMLFormatter,
+    XMLFormatter,
+)
+
+DEFAULT_OUTPUT_ENCODING = "utf-8"
+
+nonwhitespace_re = re.compile(r"\S+")
+
+# NOTE: This isn't used as of 4.7.0. I'm leaving it for a little bit on
+# the off chance someone imported it for their own use.
+whitespace_re = re.compile(r"\s+")
+
+def _alias(attr):
+    """Alias one attribute name to another for backward compatibility"""
+    @property
+    def alias(self):
+        return getattr(self, attr)
+
+    @alias.setter
+    def alias(self, value):
+        return setattr(self, attr, value)
+    return alias
+
+
+# These encodings are recognized by Python (so PageElement.encode
+# could theoretically support them) but XML and HTML don't recognize
+# them (so they should not show up in an XML or HTML document as that
+# document's encoding).
+#
+# If an XML document is encoded in one of these encodings, no encoding
+# will be mentioned in the XML declaration. If an HTML document is
+# encoded in one of these encodings, and the HTML document has a
+# <meta> tag that mentions an encoding, the encoding will be given as
+# the empty string.
+#
+# Source:
+# https://docs.python.org/3/library/codecs.html#python-specific-encodings
+PYTHON_SPECIFIC_ENCODINGS = set([
+    "idna",
+    "mbcs",
+    "oem",
+    "palmos",
+    "punycode",
+    "raw_unicode_escape",
+    "undefined",
+    "unicode_escape",
+    "raw-unicode-escape",
+    "unicode-escape",
+    "string-escape",
+    "string_escape",
+])
+
+
+class NamespacedAttribute(str):
+    """A namespaced string (e.g. 'xml:lang') that remembers the namespace
+    ('xml') and the name ('lang') that were used to create it.
+    """
+
+    def __new__(cls, prefix, name=None, namespace=None):
+        if not name:
+            # This is the default namespace. Its name "has no value"
+            # per https://www.w3.org/TR/xml-names/#defaulting
+            name = None
+
+        if not name:
+            obj = str.__new__(cls, prefix)
+        elif not prefix:
+            # Not really namespaced.
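+            # For example, NamespacedAttribute(None, 'class') behaves
+            # exactly like the plain string 'class', while
+            # NamespacedAttribute('xml', 'lang', namespace) renders as
+            # 'xml:lang' (handled by the else branch below).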
+            obj = str.__new__(cls, name)
+        else:
+            obj = str.__new__(cls, prefix + ":" + name)
+        obj.prefix = prefix
+        obj.name = name
+        obj.namespace = namespace
+        return obj
+
+class AttributeValueWithCharsetSubstitution(str):
+    """A stand-in object for a character encoding specified in HTML."""
+
+class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution):
+    """A generic stand-in for the value of a meta tag's 'charset' attribute.
+
+    When Beautiful Soup parses the markup '<meta charset="utf8">', the
+    value of the 'charset' attribute will be one of these objects.
+    """
+
+    def __new__(cls, original_value):
+        obj = str.__new__(cls, original_value)
+        obj.original_value = original_value
+        return obj
+
+    def encode(self, encoding):
+        """When an HTML document is being encoded to a given encoding, the
+        value of a meta tag's 'charset' is the name of the encoding.
+        """
+        if encoding in PYTHON_SPECIFIC_ENCODINGS:
+            return ''
+        return encoding
+
+
+class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution):
+    """A generic stand-in for the value of a meta tag's 'content' attribute.
+
+    When Beautiful Soup parses the markup:
+     <meta http-equiv="content-type" content="text/html; charset=x-sjis">
+
+    The value of the 'content' attribute will be one of these objects.
+    """
+
+    CHARSET_RE = re.compile(r"((^|;)\s*charset=)([^;]*)", re.M)
+
+    def __new__(cls, original_value):
+        match = cls.CHARSET_RE.search(original_value)
+        if match is None:
+            # No substitution necessary.
+            return str.__new__(str, original_value)
+
+        obj = str.__new__(cls, original_value)
+        obj.original_value = original_value
+        return obj
+
+    def encode(self, encoding):
+        if encoding in PYTHON_SPECIFIC_ENCODINGS:
+            return ''
+        def rewrite(match):
+            return match.group(1) + encoding
+        return self.CHARSET_RE.sub(rewrite, self.original_value)
+
+
+class PageElement(object):
+    """Contains the navigational information for some part of the page:
+    that is, its current location in the parse tree.
+
+    NavigableString, Tag, etc. are all subclasses of PageElement.
+    """
+
+    def setup(self, parent=None, previous_element=None, next_element=None,
+              previous_sibling=None, next_sibling=None):
+        """Sets up the initial relations between this element and
+        other elements.
+
+        :param parent: The parent of this element.
+
+        :param previous_element: The element parsed immediately before
+            this one.
+
+        :param next_element: The element parsed immediately after
+            this one.
+
+        :param previous_sibling: The most recently encountered element
+            on the same level of the parse tree as this one.
+
+        :param next_sibling: The next element to be encountered
+            on the same level of the parse tree as this one.
+        """
+        self.parent = parent
+
+        self.previous_element = previous_element
+        if previous_element is not None:
+            self.previous_element.next_element = self
+
+        self.next_element = next_element
+        if self.next_element is not None:
+            self.next_element.previous_element = self
+
+        self.next_sibling = next_sibling
+        if self.next_sibling is not None:
+            self.next_sibling.previous_sibling = self
+
+        if (previous_sibling is None
+            and self.parent is not None and self.parent.contents):
+            previous_sibling = self.parent.contents[-1]
+
+        self.previous_sibling = previous_sibling
+        if previous_sibling is not None:
+            self.previous_sibling.next_sibling = self
+
+    def format_string(self, s, formatter):
+        """Format the given string using the given formatter.
+
+        :param s: A string.
+        :param formatter: A Formatter object, or a string naming one of the standard formatters.
+ """ + if formatter is None: + return s + if not isinstance(formatter, Formatter): + formatter = self.formatter_for_name(formatter) + output = formatter.substitute(s) + return output + + def formatter_for_name(self, formatter): + """Look up or create a Formatter for the given identifier, + if necessary. + + :param formatter: Can be a Formatter object (used as-is), a + function (used as the entity substitution hook for an + XMLFormatter or HTMLFormatter), or a string (used to look + up an XMLFormatter or HTMLFormatter in the appropriate + registry. + """ + if isinstance(formatter, Formatter): + return formatter + if self._is_xml: + c = XMLFormatter + else: + c = HTMLFormatter + if isinstance(formatter, Callable): + return c(entity_substitution=formatter) + return c.REGISTRY[formatter] + + @property + def _is_xml(self): + """Is this element part of an XML tree or an HTML tree? + + This is used in formatter_for_name, when deciding whether an + XMLFormatter or HTMLFormatter is more appropriate. It can be + inefficient, but it should be called very rarely. + """ + if self.known_xml is not None: + # Most of the time we will have determined this when the + # document is parsed. + return self.known_xml + + # Otherwise, it's likely that this element was created by + # direct invocation of the constructor from within the user's + # Python code. + if self.parent is None: + # This is the top-level object. It should have .known_xml set + # from tree creation. If not, take a guess--BS is usually + # used on HTML markup. + return getattr(self, 'is_xml', False) + return self.parent._is_xml + + nextSibling = _alias("next_sibling") # BS3 + previousSibling = _alias("previous_sibling") # BS3 + + default = object() + def _all_strings(self, strip=False, types=default): + """Yield all strings of certain classes, possibly stripping them. + + This is implemented differently in Tag and NavigableString. + """ + raise NotImplementedError() + + @property + def stripped_strings(self): + """Yield all strings in this PageElement, stripping them first. + + :yield: A sequence of stripped strings. + """ + for string in self._all_strings(True): + yield string + + def get_text(self, separator="", strip=False, + types=default): + """Get all child strings of this PageElement, concatenated using the + given separator. + + :param separator: Strings will be concatenated using this separator. + + :param strip: If True, strings will be stripped before being + concatenated. + + :param types: A tuple of NavigableString subclasses. Any + strings of a subclass not found in this list will be + ignored. Although there are exceptions, the default + behavior in most cases is to consider only NavigableString + and CData objects. That means no comments, processing + instructions, etc. + + :return: A string. + """ + return separator.join([s for s in self._all_strings( + strip, types=types)]) + getText = get_text + text = property(get_text) + + def replace_with(self, *args): + """Replace this PageElement with one or more PageElements, keeping the + rest of the tree the same. + + :param args: One or more PageElements. + :return: `self`, no longer part of the tree. 
+ """ + if self.parent is None: + raise ValueError( + "Cannot replace one element with another when the " + "element to be replaced is not part of a tree.") + if len(args) == 1 and args[0] is self: + return + if any(x is self.parent for x in args): + raise ValueError("Cannot replace a Tag with its parent.") + old_parent = self.parent + my_index = self.parent.index(self) + self.extract(_self_index=my_index) + for idx, replace_with in enumerate(args, start=my_index): + old_parent.insert(idx, replace_with) + return self + replaceWith = replace_with # BS3 + + def unwrap(self): + """Replace this PageElement with its contents. + + :return: `self`, no longer part of the tree. + """ + my_parent = self.parent + if self.parent is None: + raise ValueError( + "Cannot replace an element with its contents when that" + "element is not part of a tree.") + my_index = self.parent.index(self) + self.extract(_self_index=my_index) + for child in reversed(self.contents[:]): + my_parent.insert(my_index, child) + return self + replace_with_children = unwrap + replaceWithChildren = unwrap # BS3 + + def wrap(self, wrap_inside): + """Wrap this PageElement inside another one. + + :param wrap_inside: A PageElement. + :return: `wrap_inside`, occupying the position in the tree that used + to be occupied by `self`, and with `self` inside it. + """ + me = self.replace_with(wrap_inside) + wrap_inside.append(me) + return wrap_inside + + def extract(self, _self_index=None): + """Destructively rips this element out of the tree. + + :param _self_index: The location of this element in its parent's + .contents, if known. Passing this in allows for a performance + optimization. + + :return: `self`, no longer part of the tree. + """ + if self.parent is not None: + if _self_index is None: + _self_index = self.parent.index(self) + del self.parent.contents[_self_index] + + #Find the two elements that would be next to each other if + #this element (and any children) hadn't been parsed. Connect + #the two. + last_child = self._last_descendant() + next_element = last_child.next_element + + if (self.previous_element is not None and + self.previous_element is not next_element): + self.previous_element.next_element = next_element + if next_element is not None and next_element is not self.previous_element: + next_element.previous_element = self.previous_element + self.previous_element = None + last_child.next_element = None + + self.parent = None + if (self.previous_sibling is not None + and self.previous_sibling is not self.next_sibling): + self.previous_sibling.next_sibling = self.next_sibling + if (self.next_sibling is not None + and self.next_sibling is not self.previous_sibling): + self.next_sibling.previous_sibling = self.previous_sibling + self.previous_sibling = self.next_sibling = None + return self + + def _last_descendant(self, is_initialized=True, accept_self=True): + """Finds the last element beneath this object to be parsed. + + :param is_initialized: Has `setup` been called on this PageElement + yet? + :param accept_self: Is `self` an acceptable answer to the question? + """ + if is_initialized and self.next_sibling is not None: + last_child = self.next_sibling.previous_element + else: + last_child = self + while isinstance(last_child, Tag) and last_child.contents: + last_child = last_child.contents[-1] + if not accept_self and last_child is self: + last_child = None + return last_child + # BS3: Not part of the API! 
+ _lastRecursiveChild = _last_descendant + + def insert(self, position, new_child): + """Insert a new PageElement in the list of this PageElement's children. + + This works the same way as `list.insert`. + + :param position: The numeric position that should be occupied + in `self.children` by the new PageElement. + :param new_child: A PageElement. + """ + if new_child is None: + raise ValueError("Cannot insert None into a tag.") + if new_child is self: + raise ValueError("Cannot insert a tag into itself.") + if (isinstance(new_child, str) + and not isinstance(new_child, NavigableString)): + new_child = NavigableString(new_child) + + from bs4 import BeautifulSoup + if isinstance(new_child, BeautifulSoup): + # We don't want to end up with a situation where one BeautifulSoup + # object contains another. Insert the children one at a time. + for subchild in list(new_child.contents): + self.insert(position, subchild) + position += 1 + return + position = min(position, len(self.contents)) + if hasattr(new_child, 'parent') and new_child.parent is not None: + # We're 'inserting' an element that's already one + # of this object's children. + if new_child.parent is self: + current_index = self.index(new_child) + if current_index < position: + # We're moving this element further down the list + # of this object's children. That means that when + # we extract this element, our target index will + # jump down one. + position -= 1 + new_child.extract() + + new_child.parent = self + previous_child = None + if position == 0: + new_child.previous_sibling = None + new_child.previous_element = self + else: + previous_child = self.contents[position - 1] + new_child.previous_sibling = previous_child + new_child.previous_sibling.next_sibling = new_child + new_child.previous_element = previous_child._last_descendant(False) + if new_child.previous_element is not None: + new_child.previous_element.next_element = new_child + + new_childs_last_element = new_child._last_descendant(False) + + if position >= len(self.contents): + new_child.next_sibling = None + + parent = self + parents_next_sibling = None + while parents_next_sibling is None and parent is not None: + parents_next_sibling = parent.next_sibling + parent = parent.parent + if parents_next_sibling is not None: + # We found the element that comes next in the document. + break + if parents_next_sibling is not None: + new_childs_last_element.next_element = parents_next_sibling + else: + # The last element of this tag is the last element in + # the document. + new_childs_last_element.next_element = None + else: + next_child = self.contents[position] + new_child.next_sibling = next_child + if new_child.next_sibling is not None: + new_child.next_sibling.previous_sibling = new_child + new_childs_last_element.next_element = next_child + + if new_childs_last_element.next_element is not None: + new_childs_last_element.next_element.previous_element = new_childs_last_element + self.contents.insert(position, new_child) + + def append(self, tag): + """Appends the given PageElement to the contents of this one. + + :param tag: A PageElement. + """ + self.insert(len(self.contents), tag) + + def extend(self, tags): + """Appends the given PageElements to this one's contents. + + :param tags: A list of PageElements. + """ + if isinstance(tags, Tag): + # Calling self.append() on another tag's contents will change + # the list we're iterating over. Make a list that won't + # change. 
+ tags = list(tags.contents) + for tag in tags: + self.append(tag) + + def insert_before(self, *args): + """Makes the given element(s) the immediate predecessor of this one. + + All the elements will have the same parent, and the given elements + will be immediately before this one. + + :param args: One or more PageElements. + """ + parent = self.parent + if parent is None: + raise ValueError( + "Element has no parent, so 'before' has no meaning.") + if any(x is self for x in args): + raise ValueError("Can't insert an element before itself.") + for predecessor in args: + # Extract first so that the index won't be screwed up if they + # are siblings. + if isinstance(predecessor, PageElement): + predecessor.extract() + index = parent.index(self) + parent.insert(index, predecessor) + + def insert_after(self, *args): + """Makes the given element(s) the immediate successor of this one. + + The elements will have the same parent, and the given elements + will be immediately after this one. + + :param args: One or more PageElements. + """ + # Do all error checking before modifying the tree. + parent = self.parent + if parent is None: + raise ValueError( + "Element has no parent, so 'after' has no meaning.") + if any(x is self for x in args): + raise ValueError("Can't insert an element after itself.") + + offset = 0 + for successor in args: + # Extract first so that the index won't be screwed up if they + # are siblings. + if isinstance(successor, PageElement): + successor.extract() + index = parent.index(self) + parent.insert(index+1+offset, successor) + offset += 1 + + def find_next(self, name=None, attrs={}, string=None, **kwargs): + """Find the first PageElement that matches the given criteria and + appears later in the document than this PageElement. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param string: A filter for a NavigableString with specific text. + :kwargs: A dictionary of filters on attribute values. + :return: A PageElement. + :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + return self._find_one(self.find_all_next, name, attrs, string, **kwargs) + findNext = find_next # BS3 + + def find_all_next(self, name=None, attrs={}, string=None, limit=None, + **kwargs): + """Find all PageElements that match the given criteria and appear + later in the document than this PageElement. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param string: A filter for a NavigableString with specific text. + :param limit: Stop looking after finding this many results. + :kwargs: A dictionary of filters on attribute values. + :return: A ResultSet containing PageElements. + """ + return self._find_all(name, attrs, string, limit, self.next_elements, + **kwargs) + findAllNext = find_all_next # BS3 + + def find_next_sibling(self, name=None, attrs={}, string=None, **kwargs): + """Find the closest sibling to this PageElement that matches the + given criteria and appears later in the document. + + All find_* methods take a common set of arguments. See the + online documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. 
+ :param string: A filter for a NavigableString with specific text. + :kwargs: A dictionary of filters on attribute values. + :return: A PageElement. + :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + return self._find_one(self.find_next_siblings, name, attrs, string, + **kwargs) + findNextSibling = find_next_sibling # BS3 + + def find_next_siblings(self, name=None, attrs={}, string=None, limit=None, + **kwargs): + """Find all siblings of this PageElement that match the given criteria + and appear later in the document. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param string: A filter for a NavigableString with specific text. + :param limit: Stop looking after finding this many results. + :kwargs: A dictionary of filters on attribute values. + :return: A ResultSet of PageElements. + :rtype: bs4.element.ResultSet + """ + return self._find_all(name, attrs, string, limit, + self.next_siblings, **kwargs) + findNextSiblings = find_next_siblings # BS3 + fetchNextSiblings = find_next_siblings # BS2 + + def find_previous(self, name=None, attrs={}, string=None, **kwargs): + """Look backwards in the document from this PageElement and find the + first PageElement that matches the given criteria. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param string: A filter for a NavigableString with specific text. + :kwargs: A dictionary of filters on attribute values. + :return: A PageElement. + :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + return self._find_one( + self.find_all_previous, name, attrs, string, **kwargs) + findPrevious = find_previous # BS3 + + def find_all_previous(self, name=None, attrs={}, string=None, limit=None, + **kwargs): + """Look backwards in the document from this PageElement and find all + PageElements that match the given criteria. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param string: A filter for a NavigableString with specific text. + :param limit: Stop looking after finding this many results. + :kwargs: A dictionary of filters on attribute values. + :return: A ResultSet of PageElements. + :rtype: bs4.element.ResultSet + """ + return self._find_all(name, attrs, string, limit, self.previous_elements, + **kwargs) + findAllPrevious = find_all_previous # BS3 + fetchPrevious = find_all_previous # BS2 + + def find_previous_sibling(self, name=None, attrs={}, string=None, **kwargs): + """Returns the closest sibling to this PageElement that matches the + given criteria and appears earlier in the document. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param string: A filter for a NavigableString with specific text. + :kwargs: A dictionary of filters on attribute values. + :return: A PageElement. 
+ :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + return self._find_one(self.find_previous_siblings, name, attrs, string, + **kwargs) + findPreviousSibling = find_previous_sibling # BS3 + + def find_previous_siblings(self, name=None, attrs={}, string=None, + limit=None, **kwargs): + """Returns all siblings to this PageElement that match the + given criteria and appear earlier in the document. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param string: A filter for a NavigableString with specific text. + :param limit: Stop looking after finding this many results. + :kwargs: A dictionary of filters on attribute values. + :return: A ResultSet of PageElements. + :rtype: bs4.element.ResultSet + """ + return self._find_all(name, attrs, string, limit, + self.previous_siblings, **kwargs) + findPreviousSiblings = find_previous_siblings # BS3 + fetchPreviousSiblings = find_previous_siblings # BS2 + + def find_parent(self, name=None, attrs={}, **kwargs): + """Find the closest parent of this PageElement that matches the given + criteria. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :kwargs: A dictionary of filters on attribute values. + + :return: A PageElement. + :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + # NOTE: We can't use _find_one because findParents takes a different + # set of arguments. + r = None + l = self.find_parents(name, attrs, 1, **kwargs) + if l: + r = l[0] + return r + findParent = find_parent # BS3 + + def find_parents(self, name=None, attrs={}, limit=None, **kwargs): + """Find all parents of this PageElement that match the given criteria. + + All find_* methods take a common set of arguments. See the online + documentation for detailed explanations. + + :param name: A filter on tag name. + :param attrs: A dictionary of filters on attribute values. + :param limit: Stop looking after finding this many results. + :kwargs: A dictionary of filters on attribute values. + + :return: A PageElement. + :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + return self._find_all(name, attrs, None, limit, self.parents, + **kwargs) + findParents = find_parents # BS3 + fetchParents = find_parents # BS2 + + @property + def next(self): + """The PageElement, if any, that was parsed just after this one. + + :return: A PageElement. + :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + return self.next_element + + @property + def previous(self): + """The PageElement, if any, that was parsed just before this one. + + :return: A PageElement. + :rtype: bs4.element.Tag | bs4.element.NavigableString + """ + return self.previous_element + + #These methods do the real heavy lifting. + + def _find_one(self, method, name, attrs, string, **kwargs): + r = None + l = method(name, attrs, string, 1, **kwargs) + if l: + r = l[0] + return r + + def _find_all(self, name, attrs, string, limit, generator, **kwargs): + "Iterates over a generator looking for things that match." + + if string is None and 'text' in kwargs: + string = kwargs.pop('text') + warnings.warn( + "The 'text' argument to find()-type methods is deprecated. 
Use 'string' instead.", + DeprecationWarning + ) + + if isinstance(name, SoupStrainer): + strainer = name + else: + strainer = SoupStrainer(name, attrs, string, **kwargs) + + if string is None and not limit and not attrs and not kwargs: + if name is True or name is None: + # Optimization to find all tags. + result = (element for element in generator + if isinstance(element, Tag)) + return ResultSet(strainer, result) + elif isinstance(name, str): + # Optimization to find all tags with a given name. + if name.count(':') == 1: + # This is a name with a prefix. If this is a namespace-aware document, + # we need to match the local name against tag.name. If not, + # we need to match the fully-qualified name against tag.name. + prefix, local_name = name.split(':', 1) + else: + prefix = None + local_name = name + result = (element for element in generator + if isinstance(element, Tag) + and ( + element.name == name + ) or ( + element.name == local_name + and (prefix is None or element.prefix == prefix) + ) + ) + return ResultSet(strainer, result) + results = ResultSet(strainer) + while True: + try: + i = next(generator) + except StopIteration: + break + if i: + found = strainer.search(i) + if found: + results.append(found) + if limit and len(results) >= limit: + break + return results + + #These generators can be used to navigate starting from both + #NavigableStrings and Tags. + @property + def next_elements(self): + """All PageElements that were parsed after this one. + + :yield: A sequence of PageElements. + """ + i = self.next_element + while i is not None: + yield i + i = i.next_element + + @property + def next_siblings(self): + """All PageElements that are siblings of this one but were parsed + later. + + :yield: A sequence of PageElements. + """ + i = self.next_sibling + while i is not None: + yield i + i = i.next_sibling + + @property + def previous_elements(self): + """All PageElements that were parsed before this one. + + :yield: A sequence of PageElements. + """ + i = self.previous_element + while i is not None: + yield i + i = i.previous_element + + @property + def previous_siblings(self): + """All PageElements that are siblings of this one but were parsed + earlier. + + :yield: A sequence of PageElements. + """ + i = self.previous_sibling + while i is not None: + yield i + i = i.previous_sibling + + @property + def parents(self): + """All PageElements that are parents of this PageElement. + + :yield: A sequence of PageElements. + """ + i = self.parent + while i is not None: + yield i + i = i.parent + + @property + def decomposed(self): + """Check whether a PageElement has been decomposed. + + :rtype: bool + """ + return getattr(self, '_decomposed', False) or False + + # Old non-property versions of the generators, for backwards + # compatibility with BS3. + def nextGenerator(self): + return self.next_elements + + def nextSiblingGenerator(self): + return self.next_siblings + + def previousGenerator(self): + return self.previous_elements + + def previousSiblingGenerator(self): + return self.previous_siblings + + def parentGenerator(self): + return self.parents + + +class NavigableString(str, PageElement): + """A Python Unicode string that is part of a parse tree. + + When Beautiful Soup parses the markup penguin, it will + create a NavigableString for the string "penguin". + """ + + PREFIX = '' + SUFFIX = '' + + # We can't tell just by looking at a string whether it's contained + # in an XML document or an HTML document. 
+ + known_xml = None + + def __new__(cls, value): + """Create a new NavigableString. + + When unpickling a NavigableString, this method is called with + the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be + passed in to the superclass's __new__ or the superclass won't know + how to handle non-ASCII characters. + """ + if isinstance(value, str): + u = str.__new__(cls, value) + else: + u = str.__new__(cls, value, DEFAULT_OUTPUT_ENCODING) + u.setup() + return u + + def __copy__(self): + """A copy of a NavigableString has the same contents and class + as the original, but it is not connected to the parse tree. + """ + return type(self)(self) + + def __getnewargs__(self): + return (str(self),) + + def __getattr__(self, attr): + """text.string gives you text. This is for backwards + compatibility for Navigable*String, but for CData* it lets you + get the string without the CData wrapper.""" + if attr == 'string': + return self + else: + raise AttributeError( + "'%s' object has no attribute '%s'" % ( + self.__class__.__name__, attr)) + + def output_ready(self, formatter="minimal"): + """Run the string through the provided formatter. + + :param formatter: A Formatter object, or a string naming one of the standard formatters. + """ + output = self.format_string(self, formatter) + return self.PREFIX + output + self.SUFFIX + + @property + def name(self): + """Since a NavigableString is not a Tag, it has no .name. + + This property is implemented so that code like this doesn't crash + when run on a mixture of Tag and NavigableString objects: + [x.name for x in tag.children] + """ + return None + + @name.setter + def name(self, name): + """Prevent NavigableString.name from ever being set.""" + raise AttributeError("A NavigableString cannot be given a name.") + + def _all_strings(self, strip=False, types=PageElement.default): + """Yield all strings of certain classes, possibly stripping them. + + This makes it easy for NavigableString to implement methods + like get_text() as conveniences, creating a consistent + text-extraction API across all PageElements. + + :param strip: If True, all strings will be stripped before being + yielded. + + :param types: A tuple of NavigableString subclasses. If this + NavigableString isn't one of those subclasses, the + sequence will be empty. By default, the subclasses + considered are NavigableString and CData objects. That + means no comments, processing instructions, etc. + + :yield: A sequence that either contains this string, or is empty. + + """ + if types is self.default: + # This is kept in Tag because it's full of subclasses of + # this class, which aren't defined until later in the file. + types = Tag.DEFAULT_INTERESTING_STRING_TYPES + + # Do nothing if the caller is looking for specific types of + # string, and we're of a different type. + # + # We check specific types instead of using isinstance(self, + # types) because all of these classes subclass + # NavigableString. Anyone who's using this feature probably + # wants generic NavigableStrings but not other stuff. + my_type = type(self) + if types is not None: + if isinstance(types, type): + # Looking for a single type. + if my_type is not types: + return + elif my_type not in types: + # Looking for one of a list of types. + return + + value = self + if strip: + value = value.strip() + if len(value) > 0: + yield value + strings = property(_all_strings) + +class PreformattedString(NavigableString): + """A NavigableString not subject to the normal formatting rules. 
+
+    This is an abstract class used for special kinds of strings such
+    as comments (the Comment class) and CDATA blocks (the CData
+    class).
+    """
+
+    PREFIX = ''
+    SUFFIX = ''
+
+    def output_ready(self, formatter=None):
+        """Make this string ready for output by adding any subclass-specific
+        prefix or suffix.
+
+        :param formatter: A Formatter object, or a string naming one
+            of the standard formatters. The string will be passed into the
+            Formatter, but only to trigger any side effects: the return
+            value is ignored.
+
+        :return: The string, with any subclass-specific prefix and
+            suffix added on.
+        """
+        if formatter is not None:
+            ignore = self.format_string(self, formatter)
+        return self.PREFIX + self + self.SUFFIX
+
+
+class CData(PreformattedString):
+    """A CDATA block."""
+    PREFIX = '<![CDATA['
+    SUFFIX = ']]>'
+
+
+class ProcessingInstruction(PreformattedString):
+    """A SGML processing instruction."""
+    PREFIX = '<?'
+    SUFFIX = '>'
+
+
+class XMLProcessingInstruction(ProcessingInstruction):
+    """An XML processing instruction."""
+    PREFIX = '<?'
+    SUFFIX = '?>'
+
+
+class Comment(PreformattedString):
+    """An HTML or XML comment."""
+    PREFIX = '<!--'
+    SUFFIX = '-->'
+
+
+class Declaration(PreformattedString):
+    """An XML declaration."""
+    PREFIX = '<?'
+    SUFFIX = '?>'
+
+
+class Doctype(PreformattedString):
+    """A document type declaration."""
+    @classmethod
+    def for_name_and_ids(cls, name, pub_id, system_id):
+        """Generate an appropriate document type declaration for a given
+        public ID and system ID.
+
+        :param name: The name of the document's root element, e.g. 'html'.
+        :param pub_id: The Formal Public Identifier for this document type,
+            e.g. '-//W3C//DTD XHTML 1.1//EN'
+        :param system_id: The system identifier for this document type,
+            e.g. 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'
+
+        :return: A Doctype.
+        """
+        value = name or ''
+        if pub_id is not None:
+            value += ' PUBLIC "%s"' % pub_id
+            if system_id is not None:
+                value += ' "%s"' % system_id
+        elif system_id is not None:
+            value += ' SYSTEM "%s"' % system_id
+
+        return Doctype(value)
+
+    PREFIX = '<!DOCTYPE '
+    SUFFIX = '>\n'
+
+
+class Stylesheet(NavigableString):
+    """A NavigableString representing a stylesheet (probably
+    CSS).
+
+    Used to distinguish embedded stylesheets from textual content.
+    """
+    pass
+
+
+class Script(NavigableString):
+    """A NavigableString representing an executable script (probably
+    Javascript).
+
+    Used to distinguish executable code from textual content.
+    """
+    pass
+
+
+class TemplateString(NavigableString):
+    """A NavigableString representing a string found inside an HTML
+    template embedded in a larger document.
+
+    Used to distinguish such strings from the main body of the document.
+    """
+    pass
+
+
+class RubyTextString(NavigableString):
+    """A NavigableString representing the contents of the <rt> HTML
+    element.
+
+    https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rt-element
+
+    Can be used to distinguish such strings from the strings they're
+    annotating.
+    """
+    pass
+
+
+class RubyParenthesisString(NavigableString):
+    """A NavigableString representing the contents of the <rp> HTML
+    element.
+
+    https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rp-element
+    """
+    pass
+
+
+class Tag(PageElement):
+    """Represents an HTML or XML tag that is part of a parse tree, along
+    with its attributes and contents.
+
+    When Beautiful Soup parses the markup <b>penguin</b>, it will
+    create a Tag object representing the <b> tag.
+ """ + + def __init__(self, parser=None, builder=None, name=None, namespace=None, + prefix=None, attrs=None, parent=None, previous=None, + is_xml=None, sourceline=None, sourcepos=None, + can_be_empty_element=None, cdata_list_attributes=None, + preserve_whitespace_tags=None, + interesting_string_types=None, + namespaces=None + ): + """Basic constructor. + + :param parser: A BeautifulSoup object. + :param builder: A TreeBuilder. + :param name: The name of the tag. + :param namespace: The URI of this Tag's XML namespace, if any. + :param prefix: The prefix for this Tag's XML namespace, if any. + :param attrs: A dictionary of this Tag's attribute values. + :param parent: The PageElement to use as this Tag's parent. + :param previous: The PageElement that was parsed immediately before + this tag. + :param is_xml: If True, this is an XML tag. Otherwise, this is an + HTML tag. + :param sourceline: The line number where this tag was found in its + source document. + :param sourcepos: The character position within `sourceline` where this + tag was found. + :param can_be_empty_element: If True, this tag should be + represented as . If False, this tag should be represented + as . + :param cdata_list_attributes: A list of attributes whose values should + be treated as CDATA if they ever show up on this tag. + :param preserve_whitespace_tags: A list of tag names whose contents + should have their whitespace preserved. + :param interesting_string_types: This is a NavigableString + subclass or a tuple of them. When iterating over this + Tag's strings in methods like Tag.strings or Tag.get_text, + these are the types of strings that are interesting enough + to be considered. The default is to consider + NavigableString and CData the only interesting string + subtypes. + :param namespaces: A dictionary mapping currently active + namespace prefixes to URIs. This can be used later to + construct CSS selectors. + """ + if parser is None: + self.parser_class = None + else: + # We don't actually store the parser object: that lets extracted + # chunks be garbage-collected. + self.parser_class = parser.__class__ + if name is None: + raise ValueError("No value provided for new tag's name.") + self.name = name + self.namespace = namespace + self._namespaces = namespaces or {} + self.prefix = prefix + if ((not builder or builder.store_line_numbers) + and (sourceline is not None or sourcepos is not None)): + self.sourceline = sourceline + self.sourcepos = sourcepos + if attrs is None: + attrs = {} + elif attrs: + if builder is not None and builder.cdata_list_attributes: + attrs = builder._replace_cdata_list_attribute_values( + self.name, attrs) + else: + attrs = dict(attrs) + else: + attrs = dict(attrs) + + # If possible, determine ahead of time whether this tag is an + # XML tag. + if builder: + self.known_xml = builder.is_xml + else: + self.known_xml = is_xml + self.attrs = attrs + self.contents = [] + self.setup(parent, previous) + self.hidden = False + + if builder is None: + # In the absence of a TreeBuilder, use whatever values were + # passed in here. They're probably None, unless this is a copy of some + # other tag. + self.can_be_empty_element = can_be_empty_element + self.cdata_list_attributes = cdata_list_attributes + self.preserve_whitespace_tags = preserve_whitespace_tags + self.interesting_string_types = interesting_string_types + else: + # Set up any substitutions for this tag, such as the charset in a META tag. 
+ builder.set_up_substitutions(self) + + # Ask the TreeBuilder whether this tag might be an empty-element tag. + self.can_be_empty_element = builder.can_be_empty_element(name) + + # Keep track of the list of attributes of this tag that + # might need to be treated as a list. + # + # For performance reasons, we store the whole data structure + # rather than asking the question of every tag. Asking would + # require building a new data structure every time, and + # (unlike can_be_empty_element), we almost never need + # to check this. + self.cdata_list_attributes = builder.cdata_list_attributes + + # Keep track of the names that might cause this tag to be treated as a + # whitespace-preserved tag. + self.preserve_whitespace_tags = builder.preserve_whitespace_tags + + if self.name in builder.string_containers: + # This sort of tag uses a special string container + # subclass for most of its strings. When we ask the + self.interesting_string_types = builder.string_containers[self.name] + else: + self.interesting_string_types = self.DEFAULT_INTERESTING_STRING_TYPES + + parserClass = _alias("parser_class") # BS3 + + def __copy__(self): + """A copy of a Tag is a new Tag, unconnected to the parse tree. + Its contents are a copy of the old Tag's contents. + """ + clone = type(self)( + None, self.builder, self.name, self.namespace, + self.prefix, self.attrs, is_xml=self._is_xml, + sourceline=self.sourceline, sourcepos=self.sourcepos, + can_be_empty_element=self.can_be_empty_element, + cdata_list_attributes=self.cdata_list_attributes, + preserve_whitespace_tags=self.preserve_whitespace_tags + ) + for attr in ('can_be_empty_element', 'hidden'): + setattr(clone, attr, getattr(self, attr)) + for child in self.contents: + clone.append(child.__copy__()) + return clone + + @property + def is_empty_element(self): + """Is this tag an empty-element tag? (aka a self-closing tag) + + A tag that has contents is never an empty-element tag. + + A tag that has no contents may or may not be an empty-element + tag. It depends on the builder used to create the tag. If the + builder has a designated list of empty-element tags, then only + a tag whose name shows up in that list is considered an + empty-element tag. + + If the builder has no designated list of empty-element tags, + then any tag with no contents is an empty-element tag. + """ + return len(self.contents) == 0 and self.can_be_empty_element + isSelfClosing = is_empty_element # BS3 + + @property + def string(self): + """Convenience property to get the single string within this + PageElement. + + TODO It might make sense to have NavigableString.string return + itself. + + :return: If this element has a single string child, return + value is that string. If this element has one child tag, + return value is the 'string' attribute of the child tag, + recursively. If this element is itself a string, has no + children, or has more than one child, return value is None. + """ + if len(self.contents) != 1: + return None + child = self.contents[0] + if isinstance(child, NavigableString): + return child + return child.string + + @string.setter + def string(self, string): + """Replace this PageElement's contents with `string`.""" + self.clear() + self.append(string.__class__(string)) + + DEFAULT_INTERESTING_STRING_TYPES = (NavigableString, CData) + def _all_strings(self, strip=False, types=PageElement.default): + """Yield all strings of certain classes, possibly stripping them. + + :param strip: If True, all strings will be stripped before being + yielded. 
+ + :param types: A tuple of NavigableString subclasses. Any strings of + a subclass not found in this list will be ignored. By + default, the subclasses considered are the ones found in + self.interesting_string_types. If that's not specified, + only NavigableString and CData objects will be + considered. That means no comments, processing + instructions, etc. + + :yield: A sequence of strings. + + """ + if types is self.default: + types = self.interesting_string_types + + for descendant in self.descendants: + if (types is None and not isinstance(descendant, NavigableString)): + continue + descendant_type = type(descendant) + if isinstance(types, type): + if descendant_type is not types: + # We're not interested in strings of this type. + continue + elif types is not None and descendant_type not in types: + # We're not interested in strings of this type. + continue + if strip: + descendant = descendant.strip() + if len(descendant) == 0: + continue + yield descendant + strings = property(_all_strings) + + def decompose(self): + """Recursively destroys this PageElement and its children. + + This element will be removed from the tree and wiped out; so + will everything beneath it. + + The behavior of a decomposed PageElement is undefined and you + should never use one for anything, but if you need to _check_ + whether an element has been decomposed, you can use the + `decomposed` property. + """ + self.extract() + i = self + while i is not None: + n = i.next_element + i.__dict__.clear() + i.contents = [] + i._decomposed = True + i = n + + def clear(self, decompose=False): + """Wipe out all children of this PageElement by calling extract() + on them. + + :param decompose: If this is True, decompose() (a more + destructive method) will be called instead of extract(). + """ + if decompose: + for element in self.contents[:]: + if isinstance(element, Tag): + element.decompose() + else: + element.extract() + else: + for element in self.contents[:]: + element.extract() + + def smooth(self): + """Smooth out this element's children by consolidating consecutive + strings. + + This makes pretty-printed output look more natural following a + lot of operations that modified the tree. + """ + # Mark the first position of every pair of children that need + # to be consolidated. Do this rather than making a copy of + # self.contents, since in most cases very few strings will be + # affected. + marked = [] + for i, a in enumerate(self.contents): + if isinstance(a, Tag): + # Recursively smooth children. + a.smooth() + if i == len(self.contents)-1: + # This is the last item in .contents, and it's not a + # tag. There's no chance it needs any work. + continue + b = self.contents[i+1] + if (isinstance(a, NavigableString) + and isinstance(b, NavigableString) + and not isinstance(a, PreformattedString) + and not isinstance(b, PreformattedString) + ): + marked.append(i) + + # Go over the marked positions in reverse order, so that + # removing items from .contents won't affect the remaining + # positions. + for i in reversed(marked): + a = self.contents[i] + b = self.contents[i+1] + b.extract() + n = NavigableString(a+b) + a.replace_with(n) + + def index(self, element): + """Find the index of a child by identity, not value. + + Avoids issues with tag.contents.index(element) getting the + index of equal elements. + + :param element: Look for this PageElement in `self.contents`. 
+ """ + for i, child in enumerate(self.contents): + if child is element: + return i + raise ValueError("Tag.index: element not in tag") + + def get(self, key, default=None): + """Returns the value of the 'key' attribute for the tag, or + the value given for 'default' if it doesn't have that + attribute.""" + return self.attrs.get(key, default) + + def get_attribute_list(self, key, default=None): + """The same as get(), but always returns a list. + + :param key: The attribute to look for. + :param default: Use this value if the attribute is not present + on this PageElement. + :return: A list of values, probably containing only a single + value. + """ + value = self.get(key, default) + if not isinstance(value, list): + value = [value] + return value + + def has_attr(self, key): + """Does this PageElement have an attribute with the given name?""" + return key in self.attrs + + def __hash__(self): + return str(self).__hash__() + + def __getitem__(self, key): + """tag[key] returns the value of the 'key' attribute for the Tag, + and throws an exception if it's not there.""" + return self.attrs[key] + + def __iter__(self): + "Iterating over a Tag iterates over its contents." + return iter(self.contents) + + def __len__(self): + "The length of a Tag is the length of its list of contents." + return len(self.contents) + + def __contains__(self, x): + return x in self.contents + + def __bool__(self): + "A tag is non-None even if it has no contents." + return True + + def __setitem__(self, key, value): + """Setting tag[key] sets the value of the 'key' attribute for the + tag.""" + self.attrs[key] = value + + def __delitem__(self, key): + "Deleting tag[key] deletes all 'key' attributes for the tag." + self.attrs.pop(key, None) + + def __call__(self, *args, **kwargs): + """Calling a Tag like a function is the same as calling its + find_all() method. Eg. tag('a') returns a list of all the A tags + found within this tag.""" + return self.find_all(*args, **kwargs) + + def __getattr__(self, tag): + """Calling tag.subtag is the same as calling tag.find(name="subtag")""" + #print("Getattr %s.%s" % (self.__class__, tag)) + if len(tag) > 3 and tag.endswith('Tag'): + # BS3: soup.aTag -> "soup.find("a") + tag_name = tag[:-3] + warnings.warn( + '.%(name)sTag is deprecated, use .find("%(name)s") instead. If you really were looking for a tag called %(name)sTag, use .find("%(name)sTag")' % dict( + name=tag_name + ), + DeprecationWarning + ) + return self.find(tag_name) + # We special case contents to avoid recursion. + elif not tag.startswith("__") and not tag == "contents": + return self.find(tag) + raise AttributeError( + "'%s' object has no attribute '%s'" % (self.__class__, tag)) + + def __eq__(self, other): + """Returns true iff this Tag has the same name, the same attributes, + and the same contents (recursively) as `other`.""" + if self is other: + return True + if (not hasattr(other, 'name') or + not hasattr(other, 'attrs') or + not hasattr(other, 'contents') or + self.name != other.name or + self.attrs != other.attrs or + len(self) != len(other)): + return False + for i, my_child in enumerate(self.contents): + if my_child != other.contents[i]: + return False + return True + + def __ne__(self, other): + """Returns true iff this Tag is not identical to `other`, + as defined in __eq__.""" + return not self == other + + def __repr__(self, encoding="unicode-escape"): + """Renders this PageElement as a string. + + :param encoding: The encoding to use (Python 2 only). 
+            TODO: This is now ignored and a warning should be issued
+            if a value is provided.
+        :return: A (Unicode) string.
+        """
+        # "The return value must be a string object", i.e. Unicode
+        return self.decode()
+
+    def __unicode__(self):
+        """Renders this PageElement as a Unicode string."""
+        return self.decode()
+
+    __str__ = __repr__ = __unicode__
+
+    def encode(self, encoding=DEFAULT_OUTPUT_ENCODING,
+               indent_level=None, formatter="minimal",
+               errors="xmlcharrefreplace"):
+        """Render a bytestring representation of this PageElement and its
+        contents.
+
+        :param encoding: The destination encoding.
+        :param indent_level: Each line of the rendering will be
+            indented this many levels. (The formatter decides what a
+            'level' means in terms of spaces or other characters
+            output.) Used internally in recursive calls while
+            pretty-printing.
+        :param formatter: A Formatter object, or a string naming one of
+            the standard formatters.
+        :param errors: An error handling strategy such as
+            'xmlcharrefreplace'. This value is passed along into
+            encode() and its value should be one of the constants
+            defined by Python.
+        :return: A bytestring.
+        """
+        # Turn the data structure into Unicode, then encode the
+        # Unicode.
+        u = self.decode(indent_level, encoding, formatter)
+        return u.encode(encoding, errors)
+
+    def decode(self, indent_level=None,
+               eventual_encoding=DEFAULT_OUTPUT_ENCODING,
+               formatter="minimal"):
+        """Render a Unicode representation of this PageElement and its
+        contents.
+
+        :param indent_level: Each line of the rendering will be
+            indented this many spaces. Used internally in
+            recursive calls while pretty-printing.
+        :param eventual_encoding: The tag is destined to be
+            encoded into this encoding. This method is _not_
+            responsible for performing that encoding. This information
+            is passed in so that it can be substituted in if the
+            document contains a <META> tag that mentions the document's
+            encoding.
+        :param formatter: A Formatter object, or a string naming one of
+            the standard formatters.
+        """
+        # First off, turn a non-Formatter `formatter` into a Formatter
+        # object. This will stop the lookup from happening over and
+        # over again.
+        if not isinstance(formatter, Formatter):
+            formatter = self.formatter_for_name(formatter)
+        attributes = formatter.attributes(self)
+        attrs = []
+        for key, val in attributes:
+            if val is None:
+                decoded = key
+            else:
+                if isinstance(val, list) or isinstance(val, tuple):
+                    val = ' '.join(val)
+                elif not isinstance(val, str):
+                    val = str(val)
+                elif (
+                    isinstance(val, AttributeValueWithCharsetSubstitution)
+                    and eventual_encoding is not None
+                ):
+                    val = val.encode(eventual_encoding)
+
+                text = formatter.attribute_value(val)
+                decoded = (
+                    str(key) + '='
+                    + formatter.quoted_attribute_value(text))
+            attrs.append(decoded)
+        close = ''
+        closeTag = ''
+
+        prefix = ''
+        if self.prefix:
+            prefix = self.prefix + ":"
+
+        if self.is_empty_element:
+            close = formatter.void_element_close_prefix or ''
+        else:
+            closeTag = '</%s%s>' % (prefix, self.name)
+
+        pretty_print = self._should_pretty_print(indent_level)
+        space = ''
+        indent_space = ''
+        if indent_level is not None:
+            indent_space = (formatter.indent * (indent_level - 1))
+        if pretty_print:
+            space = indent_space
+            indent_contents = indent_level + 1
+        else:
+            indent_contents = None
+        contents = self.decode_contents(
+            indent_contents, eventual_encoding, formatter
+        )
+
+        if self.hidden:
+            # This is the 'document root' object.
+ s = contents + else: + s = [] + attribute_string = '' + if attrs: + attribute_string = ' ' + ' '.join(attrs) + if indent_level is not None: + # Even if this particular tag is not pretty-printed, + # we should indent up to the start of the tag. + s.append(indent_space) + s.append('<%s%s%s%s>' % ( + prefix, self.name, attribute_string, close)) + if pretty_print: + s.append("\n") + s.append(contents) + if pretty_print and contents and contents[-1] != "\n": + s.append("\n") + if pretty_print and closeTag: + s.append(space) + s.append(closeTag) + if indent_level is not None and closeTag and self.next_sibling: + # Even if this particular tag is not pretty-printed, + # we're now done with the tag, and we should add a + # newline if appropriate. + s.append("\n") + s = ''.join(s) + return s + + def _should_pretty_print(self, indent_level): + """Should this tag be pretty-printed? + + Most of them should, but some (such as
+        <pre> in HTML
+        documents) should not.
+        """
+        return (
+            indent_level is not None
+            and (
+                not self.preserve_whitespace_tags
+                or self.name not in self.preserve_whitespace_tags
+            )
+        )
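+
+    # Editor's sketch (not part of bs4): with the default HTML builder,
+    # preserve_whitespace_tags includes 'pre', so pretty-printing indents
+    # the tags around a <pre> block but leaves its contents alone:
+    #
+    #     soup = BeautifulSoup("<div><pre> a  b </pre></div>", "html.parser")
+    #     print(soup.prettify())   # the <pre> contents keep their spacing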
+
+    def prettify(self, encoding=None, formatter="minimal"):
+        """Pretty-print this PageElement as a string.
+
+        :param encoding: The eventual encoding of the string. If this is None,
+            a Unicode string will be returned.
+        :param formatter: A Formatter object, or a string naming one of
+            the standard formatters.
+        :return: A Unicode string (if encoding==None) or a bytestring 
+            (otherwise).
+        """
+        if encoding is None:
+            return self.decode(True, formatter=formatter)
+        else:
+            return self.encode(encoding, True, formatter=formatter)
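+
+    # Usage sketch (editor's note, not part of bs4):
+    #
+    #     soup = BeautifulSoup("<p><b>Hi</b></p>", "html.parser")
+    #     print(soup.p.prettify())    # Unicode string, one tag per line
+    #     soup.p.prettify("utf-8")    # the same rendering, as a bytestring
+    #
+    # prettify() always pretty-prints; pass formatter="html" or a Formatter
+    # object to control entity substitution.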
+
+    def decode_contents(self, indent_level=None,
+                       eventual_encoding=DEFAULT_OUTPUT_ENCODING,
+                       formatter="minimal"):
+        """Renders the contents of this tag as a Unicode string.
+
+        :param indent_level: Each line of the rendering will be
+           indented this many levels. (The formatter decides what a
+           'level' means in terms of spaces or other characters
+           output.) Used internally in recursive calls while
+           pretty-printing.
+
+        :param eventual_encoding: The tag is destined to be
+           encoded into this encoding. decode_contents() is _not_
+           responsible for performing that encoding. This information
+           is passed in so that it can be substituted in if the
+           document contains a <META> tag that mentions the document's
+           encoding.
+
+        :param formatter: A Formatter object, or a string naming one of
+            the standard Formatters.
+
+        """
+        # First off, turn a string formatter into a Formatter object. This
+        # will stop the lookup from happening over and over again.
+        if not isinstance(formatter, Formatter):
+            formatter = self.formatter_for_name(formatter)
+
+        pretty_print = (indent_level is not None)
+        s = []
+        for c in self:
+            text = None
+            if isinstance(c, NavigableString):
+                text = c.output_ready(formatter)
+            elif isinstance(c, Tag):
+                s.append(c.decode(indent_level, eventual_encoding,
+                                  formatter))
+            preserve_whitespace = (
+                self.preserve_whitespace_tags and self.name in self.preserve_whitespace_tags
+            )
+            if text and indent_level and not preserve_whitespace:
+                text = text.strip()
+            if text:
+                if pretty_print and not preserve_whitespace:
+                    s.append(formatter.indent * (indent_level - 1))
+                s.append(text)
+                if pretty_print and not preserve_whitespace:
+                    s.append("\n")
+        return ''.join(s)
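+
+    # Usage sketch (editor's note, not part of bs4): decode_contents()
+    # renders only the children, while decode() also renders the tag itself.
+    #
+    #     soup = BeautifulSoup("<div><p>Hi</p></div>", "html.parser")
+    #     soup.div.decode_contents()   # '<p>Hi</p>'
+    #     soup.div.decode()            # '<div><p>Hi</p></div>'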
+
+    def encode_contents(
+        self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING,
+        formatter="minimal"):
+        """Renders the contents of this PageElement as a bytestring.
+
+        :param indent_level: Each line of the rendering will be
+           indented this many levels. (The formatter decides what a
+           'level' means in terms of spaces or other characters
+           output.) Used internally in recursive calls while
+           pretty-printing.
+
+        :param encoding: The bytestring will be in this encoding.
+
+        :param formatter: A Formatter object, or a string naming one of
+            the standard Formatters.
+
+        :return: A bytestring.
+        """
+        contents = self.decode_contents(indent_level, encoding, formatter)
+        return contents.encode(encoding)
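+
+    # Usage sketch (editor's note, not part of bs4): the bytestring
+    # counterpart of decode_contents().
+    #
+    #     soup = BeautifulSoup("<div><p>Hi</p></div>", "html.parser")
+    #     soup.div.encode_contents()                    # b'<p>Hi</p>'
+    #     soup.div.encode_contents(encoding="latin-1")  # same, Latin-1 bytes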
+
+    # Old method for BS3 compatibility
+    def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
+                       prettyPrint=False, indentLevel=0):
+        """Deprecated method for BS3 compatibility."""
+        if not prettyPrint:
+            indentLevel = None
+        return self.encode_contents(
+            indent_level=indentLevel, encoding=encoding)
+
+    #Soup methods
+
+    def find(self, name=None, attrs={}, recursive=True, string=None,
+             **kwargs):
+        """Look in the children of this PageElement and find the first
+        PageElement that matches the given criteria.
+
+        All find_* methods take a common set of arguments. See the online
+        documentation for detailed explanations.
+
+        :param name: A filter on tag name.
+        :param attrs: A dictionary of filters on attribute values.
+        :param recursive: If this is True, find() will perform a
+            recursive search of this PageElement's children. Otherwise,
+            only the direct children will be considered.
+        :param string: A filter for a NavigableString with specific text.
+        :kwargs: A dictionary of filters on attribute values.
+        :return: A PageElement.
+        :rtype: bs4.element.Tag | bs4.element.NavigableString
+        """
+        r = None
+        l = self.find_all(name, attrs, recursive, string, 1, **kwargs)
+        if l:
+            r = l[0]
+        return r
+    findChild = find #BS2
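+
+    # Usage sketch (editor's note, not part of bs4):
+    #
+    #     soup.find("a")                  # first <a> tag, or None
+    #     soup.find("a", href=True)       # first <a> that has an href
+    #     soup.find(string="penguin")     # first string equal to 'penguin'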
+
+    def find_all(self, name=None, attrs={}, recursive=True, string=None,
+                 limit=None, **kwargs):
+        """Look in the children of this PageElement and find all
+        PageElements that match the given criteria.
+
+        All find_* methods take a common set of arguments. See the online
+        documentation for detailed explanations.
+
+        :param name: A filter on tag name.
+        :param attrs: A dictionary of filters on attribute values.
+        :param recursive: If this is True, find_all() will perform a
+            recursive search of this PageElement's children. Otherwise,
+            only the direct children will be considered.
+        :param limit: Stop looking after finding this many results.
+        :kwargs: A dictionary of filters on attribute values.
+        :return: A ResultSet of PageElements.
+        :rtype: bs4.element.ResultSet
+        """
+        generator = self.descendants
+        if not recursive:
+            generator = self.children
+        return self._find_all(name, attrs, string, limit, generator, **kwargs)
+    findAll = find_all       # BS3
+    findChildren = find_all  # BS2
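+
+    # Usage sketch (editor's note, not part of bs4):
+    #
+    #     soup.find_all("a", limit=2)             # at most two <a> tags
+    #     soup.find_all("p", class_="intro")      # class_ filters on class
+    #     soup.find_all(True, recursive=False)    # direct child tags only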
+
+    #Generator methods
+    @property
+    def children(self):
+        """Iterate over all direct children of this PageElement.
+
+        :yield: A sequence of PageElements.
+        """
+        # return iter() to make the purpose of the method clear
+        return iter(self.contents)  # XXX This seems to be untested.
+
+    @property
+    def descendants(self):
+        """Iterate over all children of this PageElement in a
+        breadth-first sequence.
+
+        :yield: A sequence of PageElements.
+        """
+        if not len(self.contents):
+            return
+        stopNode = self._last_descendant().next_element
+        current = self.contents[0]
+        while current is not stopNode:
+            yield current
+            current = current.next_element
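+
+    # Behavior sketch (editor's note, not part of bs4):
+    #
+    #     soup = BeautifulSoup("<div><p>Hi <b>there</b></p></div>", "html.parser")
+    #     list(soup.div.children)      # [<p>Hi <b>there</b></p>]
+    #     list(soup.div.descendants)   # [<p>...</p>, 'Hi ', <b>there</b>, 'there']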
+
+    # CSS selector code
+    def select_one(self, selector, namespaces=None, **kwargs):
+        """Perform a CSS selection operation on the current element.
+
+        :param selector: A CSS selector.
+
+        :param namespaces: A dictionary mapping namespace prefixes
+           used in the CSS selector to namespace URIs. By default,
+           Beautiful Soup will use the prefixes it encountered while
+           parsing the document.
+
+        :param kwargs: Keyword arguments to be passed into SoupSieve's 
+           soupsieve.select() method.
+
+        :return: A Tag.
+        :rtype: bs4.element.Tag
+        """
+        value = self.select(selector, namespaces, 1, **kwargs)
+        if value:
+            return value[0]
+        return None
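+
+    # Usage sketch (editor's note, not part of bs4):
+    #
+    #     soup.select_one("p.title > a")   # first matching Tag, or None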
+
+    def select(self, selector, namespaces=None, limit=None, **kwargs):
+        """Perform a CSS selection operation on the current element.
+
+        This uses the SoupSieve library.
+
+        :param selector: A string containing a CSS selector.
+
+        :param namespaces: A dictionary mapping namespace prefixes
+           used in the CSS selector to namespace URIs. By default,
+           Beautiful Soup will use the prefixes it encountered while
+           parsing the document.
+
+        :param limit: After finding this number of results, stop looking.
+
+        :param kwargs: Keyword arguments to be passed into SoupSieve's 
+           soupsieve.select() method.
+
+        :return: A ResultSet of Tags.
+        :rtype: bs4.element.ResultSet
+        """
+        if namespaces is None:
+            namespaces = self._namespaces
+        
+        if limit is None:
+            limit = 0
+        if soupsieve is None:
+            raise NotImplementedError(
+                "Cannot execute CSS selectors because the soupsieve package is not installed."
+            )
+            
+        results = soupsieve.select(selector, self, namespaces, limit, **kwargs)
+
+        # We do this because it's more consistent and because
+        # ResultSet.__getattr__ has a helpful error message.
+        return ResultSet(None, results)
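+
+    # Usage sketch (editor's note, not part of bs4; requires the soupsieve
+    # package, and the namespace URI below is a placeholder):
+    #
+    #     soup.select("div p", limit=2)    # at most two matching tags
+    #     soup.select("ns|tag", namespaces={"ns": "http://example.com/ns"})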
+
+    # Old names for backwards compatibility
+    def childGenerator(self):
+        """Deprecated generator."""
+        return self.children
+
+    def recursiveChildGenerator(self):
+        """Deprecated generator."""
+        return self.descendants
+
+    def has_key(self, key):
+        """Deprecated method. This was kind of misleading because has_key()
+        (attributes) was different from __in__ (contents).
+
+        has_key() is gone in Python 3, anyway.
+        """
+        warnings.warn(
+            'has_key is deprecated. Use has_attr(key) instead.',
+            DeprecationWarning
+        )
+        return self.has_attr(key)
+
+# Next, a couple classes to represent queries and their results.
+class SoupStrainer(object):
+    """Encapsulates a number of ways of matching a markup element (tag or
+    string).
+
+    This is primarily used to underpin the find_* methods, but you can
+    create one yourself and pass it in as `parse_only` to the
+    `BeautifulSoup` constructor, to parse a subset of a large
+    document.
+    """
+
+    def __init__(self, name=None, attrs={}, string=None, **kwargs):
+        """Constructor.
+
+        The SoupStrainer constructor takes the same arguments passed
+        into the find_* methods. See the online documentation for
+        detailed explanations.
+
+        :param name: A filter on tag name.
+        :param attrs: A dictionary of filters on attribute values.
+        :param string: A filter for a NavigableString with specific text.
+        :kwargs: A dictionary of filters on attribute values.
+        """        
+        if string is None and 'text' in kwargs:
+            string = kwargs.pop('text')
+            warnings.warn(
+                "The 'text' argument to the SoupStrainer constructor is deprecated. Use 'string' instead.",
+                DeprecationWarning
+            )
+
+        self.name = self._normalize_search_value(name)
+        if not isinstance(attrs, dict):
+            # Treat a non-dict value for attrs as a search for the 'class'
+            # attribute.
+            kwargs['class'] = attrs
+            attrs = None
+
+        if 'class_' in kwargs:
+            # Treat class_="foo" as a search for the 'class'
+            # attribute, overriding any non-dict value for attrs.
+            kwargs['class'] = kwargs['class_']
+            del kwargs['class_']
+
+        if kwargs:
+            if attrs:
+                attrs = attrs.copy()
+                attrs.update(kwargs)
+            else:
+                attrs = kwargs
+        normalized_attrs = {}
+        for key, value in list(attrs.items()):
+            normalized_attrs[key] = self._normalize_search_value(value)
+
+        self.attrs = normalized_attrs
+        self.string = self._normalize_search_value(string)
+
+        # DEPRECATED but just in case someone is checking this.
+        self.text = self.string
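+
+    # Usage sketch (editor's note, not part of bs4; `markup` stands in for
+    # any HTML string):
+    #
+    #     only_links = SoupStrainer("a", href=True)
+    #     soup = BeautifulSoup(markup, "html.parser", parse_only=only_links)
+    #     # soup now contains only the <a> tags parsed from markup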
+
+    def _normalize_search_value(self, value):
+        # Leave it alone if it's a Unicode string, a callable, a
+        # regular expression, a boolean, or None.
+        if (isinstance(value, str) or isinstance(value, Callable) or hasattr(value, 'match')
+            or isinstance(value, bool) or value is None):
+            return value
+
+        # If it's a bytestring, convert it to Unicode, treating it as UTF-8.
+        if isinstance(value, bytes):
+            return value.decode("utf8")
+
+        # If it's listlike, convert it into a list of strings.
+        if hasattr(value, '__iter__'):
+            new_value = []
+            for v in value:
+                if (hasattr(v, '__iter__') and not isinstance(v, bytes)
+                    and not isinstance(v, str)):
+                    # This is almost certainly the user's mistake. In the
+                    # interests of avoiding infinite loops, we'll let
+                    # it through as-is rather than doing a recursive call.
+                    new_value.append(v)
+                else:
+                    new_value.append(self._normalize_search_value(v))
+            return new_value
+
+        # Otherwise, convert it into a Unicode string.
+        # The unicode(str()) thing is so this will do the same thing on Python 2
+        # and Python 3.
+        return str(str(value))
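+
+    # Behavior sketch (editor's note, not part of bs4):
+    #
+    #     SoupStrainer(b"a").name          # bytes are decoded: 'a'
+    #     SoupStrainer(["a", 42]).name     # listlike values: ['a', '42']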
+
+    def __str__(self):
+        """A human-readable representation of this SoupStrainer."""
+        if self.string:
+            return self.string
+        else:
+            return "%s|%s" % (self.name, self.attrs)
+
+    def search_tag(self, markup_name=None, markup_attrs={}):
+        """Check whether a Tag with the given name and attributes would
+        match this SoupStrainer.
+
+        Used prospectively to decide whether to even bother creating a Tag
+        object.
+
+        :param markup_name: A tag name as found in some markup.
+        :param markup_attrs: A dictionary of attributes as found in some markup.
+
+        :return: True if the prospective tag would match this SoupStrainer;
+            False otherwise.
+        """
+        found = None
+        markup = None
+        if isinstance(markup_name, Tag):
+            markup = markup_name
+            markup_attrs = markup
+
+        if isinstance(self.name, str):
+            # Optimization for a very common case where the user is
+            # searching for a tag with one specific name, and we're
+            # looking at a tag with a different name.
+            if markup and not markup.prefix and self.name != markup.name:
+                return False
+
+        call_function_with_tag_data = (
+            isinstance(self.name, Callable)
+            and not isinstance(markup_name, Tag))
+
+        if ((not self.name)
+            or call_function_with_tag_data
+            or (markup and self._matches(markup, self.name))
+            or (not markup and self._matches(markup_name, self.name))):
+            if call_function_with_tag_data:
+                match = self.name(markup_name, markup_attrs)
+            else:
+                match = True
+                markup_attr_map = None
+                for attr, match_against in list(self.attrs.items()):
+                    if not markup_attr_map:
+                        if hasattr(markup_attrs, 'get'):
+                            markup_attr_map = markup_attrs
+                        else:
+                            markup_attr_map = {}
+                            for k, v in markup_attrs:
+                                markup_attr_map[k] = v
+                    attr_value = markup_attr_map.get(attr)
+                    if not self._matches(attr_value, match_against):
+                        match = False
+                        break
+            if match:
+                if markup:
+                    found = markup
+                else:
+                    found = markup_name
+        if found and self.string and not self._matches(found.string, self.string):
+            found = None
+        return found
+
+    # For BS3 compatibility.
+    searchTag = search_tag
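+
+    # A quick sketch of prospective matching (illustrative, not upstream
+    # code): a parser can ask whether a tag is worth building at all.
+    #
+    #     strainer = SoupStrainer("b", id="bold")
+    #     strainer.search_tag("b", {"id": "bold"})   # truthy: build the Tag
+    #     strainer.search_tag("b", {"id": "other"})  # None: skip it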
+
+    def search(self, markup):
+        """Find all items in `markup` that match this SoupStrainer.
+
+        Used by the core _find_all() method, which is ultimately
+        called by all find_* methods.
+
+        :param markup: A PageElement or a list of them.
+        """
+        # print('looking for %s in %s' % (self, markup))
+        found = None
+        # If given a list of items, scan it for a text element that
+        # matches.
+        if hasattr(markup, '__iter__') and not isinstance(markup, (Tag, str)):
+            for element in markup:
+                if isinstance(element, NavigableString) \
+                       and self.search(element):
+                    found = element
+                    break
+        # If it's a Tag, make sure its name or attributes match.
+        # Don't bother with Tags if we're searching for text.
+        elif isinstance(markup, Tag):
+            if not self.string or self.name or self.attrs:
+                found = self.search_tag(markup)
+        # If it's text, make sure the text matches.
+        elif isinstance(markup, NavigableString) or \
+                 isinstance(markup, str):
+            if not self.name and not self.attrs and self._matches(markup, self.string):
+                found = markup
+        else:
+            raise Exception(
+                "I don't know how to match against a %s" % markup.__class__)
+        return found
+
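+    # Typical entry point (illustrative, not upstream code): a SoupStrainer
+    # is usually handed to BeautifulSoup as parse_only, which routes every
+    # parse event through search()/search_tag(). Here `markup` is a
+    # placeholder string.
+    #
+    #     only_links = SoupStrainer("a")
+    #     soup = BeautifulSoup(markup, "html.parser", parse_only=only_links)
+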
+    def _matches(self, markup, match_against, already_tried=None):
+        # print(u"Matching %s against %s" % (markup, match_against))
+        result = False
+        if isinstance(markup, list) or isinstance(markup, tuple):
+            # This should only happen when searching a multi-valued attribute
+            # like 'class'.
+            for item in markup:
+                if self._matches(item, match_against):
+                    return True
+            # We didn't match any particular value of the multivalue
+            # attribute, but maybe we match the attribute value when
+            # considered as a string.
+            if self._matches(' '.join(markup), match_against):
+                return True
+            return False
+        
+        if match_against is True:
+            # True matches any non-None value.
+            return markup is not None
+
+        if isinstance(match_against, Callable):
+            return match_against(markup)
+
+        # Custom callables take the tag as an argument, but all
+        # other ways of matching match the tag name as a string.
+        original_markup = markup
+        if isinstance(markup, Tag):
+            markup = markup.name
+
+        # Ensure that `markup` is either a Unicode string, or None.
+        markup = self._normalize_search_value(markup)
+
+        if markup is None:
+            # None matches None, False, an empty string, an empty list, and so on.
+            return not match_against
+
+        if (hasattr(match_against, '__iter__')
+            and not isinstance(match_against, str)):
+            # We're asked to match against an iterable of items.
+            # The markup must be match at least one item in the
+            # iterable. We'll try each one in turn.
+            #
+            # To avoid infinite recursion we need to keep track of
+            # items we've already seen.
+            if not already_tried:
+                already_tried = set()
+            for item in match_against:
+                if item.__hash__:
+                    key = item
+                else:
+                    key = id(item)
+                if key in already_tried:
+                    continue
+                else:
+                    already_tried.add(key)
+                    if self._matches(original_markup, item, already_tried):
+                        return True
+            else:
+                return False
+        
+        # Beyond this point we might need to run the test twice: once against
+        # the tag's name and once against its prefixed name.
+        match = False
+        
+        if not match and isinstance(match_against, str):
+            # Exact string match
+            match = markup == match_against
+
+        if not match and hasattr(match_against, 'search'):
+            # Regexp match
+            return match_against.search(markup)
+
+        if (not match
+            and isinstance(original_markup, Tag)
+            and original_markup.prefix):
+            # Try the whole thing again with the prefixed tag name.
+            return self._matches(
+                original_markup.prefix + ':' + original_markup.name, match_against
+            )
+
+        return match
+
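+
+# Two _matches() behaviors worth noting (illustrative, not upstream code):
+# a multi-valued attribute like 'class' matches item-by-item and then as
+# the joined string, and a compiled regex matches via its .search() method.
+#
+#     s = SoupStrainer(class_="foo bar")
+#     s._matches(["foo", "bar"], "foo bar")               # True, via ' '.join
+#     SoupStrainer(re.compile("^b")).search_tag("b", {})  # truthy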
+
+class ResultSet(list):
+    """A ResultSet is just a list that keeps track of the SoupStrainer
+    that created it."""
+    def __init__(self, source, result=()):
+        """Constructor.
+
+        :param source: A SoupStrainer.
+        :param result: A list of PageElements.
+        """
+        super(ResultSet, self).__init__(result)
+        self.source = source
+
+    def __getattr__(self, key):
+        """Raise a helpful exception to explain a common code fix."""
+        raise AttributeError(
+            "ResultSet object has no attribute '%s'. You're probably treating a list of elements like a single element. Did you call find_all() when you meant to call find()?" % key
+        )
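+
+# The common trip-up that __getattr__ above guards against (illustrative):
+#
+#     soup = BeautifulSoup("<a>1</a><a>2</a>", "html.parser")
+#     soup.find_all("a").text                 # raises the AttributeError
+#     [a.text for a in soup.find_all("a")]    # what was probably meant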
diff --git a/sbsheriff/Lib/site-packages/bs4/formatter.py b/sbsheriff/Lib/site-packages/bs4/formatter.py
new file mode 100644
index 0000000..65e57b5
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/formatter.py
@@ -0,0 +1,185 @@
+from bs4.dammit import EntitySubstitution
+
+class Formatter(EntitySubstitution):
+    """Describes a strategy to use when outputting a parse tree to a string.
+
+    Some parts of this strategy come from the distinction between
+    HTML4, HTML5, and XML. Others are configurable by the user.
+
+    Formatters are passed in as the `formatter` argument to methods
+    like `PageElement.encode`. Most people won't need to think about
+    formatters, and most people who need to think about them can pass
+    in one of these predefined strings as `formatter` rather than
+    making a new Formatter object:
+
+    For HTML documents:
+     * 'html' - HTML entity substitution for generic HTML documents. (default)
+     * 'html5' - HTML entity substitution for HTML5 documents, as
+                 well as some optimizations in the way tags are rendered.
+     * 'minimal' - Only make the substitutions necessary to guarantee
+                   valid HTML.
+     * None - Do not perform any substitution. This will be faster
+              but may result in invalid markup.
+
+    For XML documents:
+     * 'html' - Entity substitution for XHTML documents.
+     * 'minimal' - Only make the substitutions necessary to guarantee
+                   valid XML. (default)
+     * None - Do not perform any substitution. This will be faster
+              but may result in invalid markup.
+    """
+    # Registries of XML and HTML formatters.
+    XML_FORMATTERS = {}
+    HTML_FORMATTERS = {}
+
+    HTML = 'html'
+    XML = 'xml'
+
+    HTML_DEFAULTS = dict(
+        cdata_containing_tags=set(["script", "style"]),
+    )
+
+    def _default(self, language, value, kwarg):
+        if value is not None:
+            return value
+        if language == self.XML:
+            return set()
+        return self.HTML_DEFAULTS[kwarg]
+
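+    # e.g. (illustrative): _default(Formatter.HTML, None, 'cdata_containing_tags')
+    # returns {"script", "style"}; with Formatter.XML it returns an empty set;
+    # any explicit non-None value is passed through unchanged.
+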
+    def __init__(
+            self, language=None, entity_substitution=None,
+            void_element_close_prefix='/', cdata_containing_tags=None,
+            empty_attributes_are_booleans=False, indent=1,
+    ):
+        """Constructor.
+
+        :param language: This should be Formatter.XML if you are formatting
+           XML markup and Formatter.HTML if you are formatting HTML markup.
+
+        :param entity_substitution: A function to call to replace special
+           characters with XML/HTML entities. For examples, see 
+           bs4.dammit.EntitySubstitution.substitute_html and substitute_xml.
+        :param void_element_close_prefix: By default, void elements
+           are represented as <tag/> (XML rules) rather than <tag>
+           (HTML rules). To get <tag>, pass in the empty string.
+        :param cdata_containing_tags: The set of tags that are defined
+           as containing CDATA in this dialect. For example, in HTML,
+           <script> and <style> tags are defined as containing CDATA,
+           and their contents should not be formatted.
+        :param empty_attributes_are_booleans: If this is set to true,
+           an attribute whose value is the empty string will be
+           rendered as a bare HTML-style boolean attribute, e.g.
+           <option selected> rather than <option selected="">.
+        :param indent: The indentation (a string, or a number of
+           spaces) to use when pretty-printing.
+        """
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__init__.py b/sbsheriff/Lib/site-packages/bs4/tests/__init__.py
new file mode 100644
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/tests/__init__.py
+# Helper classes and fixtures shared by the Beautiful Soup test suite.
+import copy
+import pickle
+import warnings
+
+import pytest
+
+from bs4 import BeautifulSoup
+from bs4.builder import HTMLParserTreeBuilder, XMLParsedAsHTMLWarning
+from bs4.element import (
+    CharsetMetaAttributeValue,
+    Comment,
+    ContentMetaAttributeValue,
+    Doctype,
+    PYTHON_SPECIFIC_ENCODINGS,
+    Script,
+    SoupStrainer,
+    Stylesheet,
+    Tag,
+)
+
+# The tree builder used by SoupTest unless a test passes its own.
+default_builder = HTMLParserTreeBuilder
+
+# A deliberately malformed document, used by test_worst_case() to stress
+# the linkage validator.
+BAD_DOCUMENT = """A bare string
+This numeric entity is missing the final semicolon:
+This document contains an incomplete declaration (do you see it?)
+This document ends with an incomplete declaration
+That attribute value was bogus
+The doctype is invalid because it contains extra whitespace
+That boolean attribute had no value
+Here's a nonexistent entity: &#foo; (do you see it?)
+This document ends before the entity finishes: &gt
+Paragraphs shouldn't contain block display elements, but this one does: you see?
+Multiple values for the same attribute.
+Here's a table
+This tag contains nothing but whitespace:
+This p tag is cut off by the end of the blockquote tag
+Here's a nested table: foo
+This table contains bare markup
+This document contains a surprise doctype
+Tag name contains Unicode characters
+ + +""" + + +class SoupTest(object): + + @property + def default_builder(self): + return default_builder + + def soup(self, markup, **kwargs): + """Build a Beautiful Soup object from markup.""" + builder = kwargs.pop('builder', self.default_builder) + return BeautifulSoup(markup, builder=builder, **kwargs) + + def document_for(self, markup, **kwargs): + """Turn an HTML fragment into a document. + + The details depend on the builder. + """ + return self.default_builder(**kwargs).test_fragment_to_document(markup) + + def assert_soup(self, to_parse, compare_parsed_to=None): + """Parse some markup using Beautiful Soup and verify that + the output markup is as expected. + """ + builder = self.default_builder + obj = BeautifulSoup(to_parse, builder=builder) + if compare_parsed_to is None: + compare_parsed_to = to_parse + + # Verify that the documents come out the same. + assert obj.decode() == self.document_for(compare_parsed_to) + + # Also run some checks on the BeautifulSoup object itself: + + # Verify that every tag that was opened was eventually closed. + + # There are no tags in the open tag counter. + assert all(v==0 for v in list(obj.open_tag_counter.values())) + + # The only tag in the tag stack is the one for the root + # document. + assert [obj.ROOT_TAG_NAME] == [x.name for x in obj.tagStack] + + assertSoupEquals = assert_soup + + def assertConnectedness(self, element): + """Ensure that next_element and previous_element are properly + set for all descendants of the given element. + """ + earlier = None + for e in element.descendants: + if earlier: + assert e == earlier.next_element + assert earlier == e.previous_element + earlier = e + + def linkage_validator(self, el, _recursive_call=False): + """Ensure proper linkage throughout the document.""" + descendant = None + # Document element should have no previous element or previous sibling. + # It also shouldn't have a next sibling. 
+        if el.parent is None:
+            assert el.previous_element is None, \
+                "Bad previous_element\nNODE: {}\nPREV: {}\nEXPECTED: {}".format(
+                    el, el.previous_element, None
+                )
+            assert el.previous_sibling is None, \
+                "Bad previous_sibling\nNODE: {}\nPREV: {}\nEXPECTED: {}".format(
+                    el, el.previous_sibling, None
+                )
+            assert el.next_sibling is None, \
+                "Bad next_sibling\nNODE: {}\nNEXT: {}\nEXPECTED: {}".format(
+                    el, el.next_sibling, None
+                )
+
+        idx = 0
+        child = None
+        last_child = None
+        last_idx = len(el.contents) - 1
+        for child in el.contents:
+            descendant = None
+
+            # Parent should link next element to their first child
+            # That child should have no previous sibling
+            if idx == 0:
+                if el.parent is not None:
+                    assert el.next_element is child, \
+                        "Bad next_element\nNODE: {}\nNEXT: {}\nEXPECTED: {}".format(
+                            el, el.next_element, child
+                        )
+                    assert child.previous_element is el, \
+                        "Bad previous_element\nNODE: {}\nPREV: {}\nEXPECTED: {}".format(
+                            child, child.previous_element, el
+                        )
+                    assert child.previous_sibling is None, \
+                        "Bad previous_sibling\nNODE: {}\nPREV {}\nEXPECTED: {}".format(
+                            child, child.previous_sibling, None
+                        )
+
+            # If not the first child, previous index should link as sibling to this index
+            # Previous element should match the last index or the last bubbled up descendant
+            else:
+                assert child.previous_sibling is el.contents[idx - 1], \
+                    "Bad previous_sibling\nNODE: {}\nPREV {}\nEXPECTED {}".format(
+                        child, child.previous_sibling, el.contents[idx - 1]
+                    )
+                assert el.contents[idx - 1].next_sibling is child, \
+                    "Bad next_sibling\nNODE: {}\nNEXT {}\nEXPECTED {}".format(
+                        el.contents[idx - 1], el.contents[idx - 1].next_sibling, child
+                    )
+
+                if last_child is not None:
+                    assert child.previous_element is last_child, \
+                        "Bad previous_element\nNODE: {}\nPREV {}\nEXPECTED {}\nCONTENTS {}".format(
+                            child, child.previous_element, last_child, child.parent.contents
+                        )
+                    assert last_child.next_element is child, \
+                        "Bad next_element\nNODE: {}\nNEXT {}\nEXPECTED {}".format(
+                            last_child, last_child.next_element, child
+                        )
+
+            if isinstance(child, Tag) and child.contents:
+                descendant = self.linkage_validator(child, True)
+                # A bubbled up descendant should have no next siblings
+                assert descendant.next_sibling is None, \
+                    "Bad next_sibling\nNODE: {}\nNEXT {}\nEXPECTED {}".format(
+                        descendant, descendant.next_sibling, None
+                    )
+
+            # Mark last child as either the bubbled up descendant or the current child
+            if descendant is not None:
+                last_child = descendant
+            else:
+                last_child = child
+
+            # If last child, there are no next siblings
+            if idx == last_idx:
+                assert child.next_sibling is None, \
+                    "Bad next_sibling\nNODE: {}\nNEXT {}\nEXPECTED {}".format(
+                        child, child.next_sibling, None
+                    )
+            idx += 1
+
+        child = descendant if descendant is not None else child
+        if child is None:
+            child = el
+
+        if not _recursive_call and child is not None:
+            target = el
+            while True:
+                if target is None:
+                    assert child.next_element is None, \
+                        "Bad next_element\nNODE: {}\nNEXT {}\nEXPECTED {}".format(
+                            child, child.next_element, None
+                        )
+                    break
+                elif target.next_sibling is not None:
+                    assert child.next_element is target.next_sibling, \
+                        "Bad next_element\nNODE: {}\nNEXT {}\nEXPECTED {}".format(
+                            child, child.next_element, target.next_sibling
+                        )
+                    break
+                target = target.parent
+
+            # We are done, so nothing to return
+            return None
+        else:
+            # Return the child to the recursive caller
+            return child
+
+    def assert_selects(self, tags, should_match):
+        """Make sure that the given tags have the correct text.
+
+        This is used in tests that define a bunch of tags, each
+        containing a single string, and then select certain strings by
+        some mechanism.
+        """
+        assert [tag.string for tag in tags] == should_match
+
+    def assert_selects_ids(self, tags, should_match):
+        """Make sure that the given tags have the correct IDs.
+
+        This is used in tests that define a bunch of tags, each
+        containing a single string, and then select certain strings by
+        some mechanism.
+        """
+        assert [tag['id'] for tag in tags] == should_match
+
+
+class TreeBuilderSmokeTest(object):
+    # Tests that are common to HTML and XML tree builders.
+
+    @pytest.mark.parametrize(
+        "multi_valued_attributes",
+        [None, dict(b=['class']), {'*': ['notclass']}]
+    )
+    def test_attribute_not_multi_valued(self, multi_valued_attributes):
+        markup = '<a class="a b c">'
+        soup = self.soup(markup, multi_valued_attributes=multi_valued_attributes)
+        assert soup.a['class'] == 'a b c'
+
+    @pytest.mark.parametrize(
+        "multi_valued_attributes", [dict(a=['class']), {'*': ['class']}]
+    )
+    def test_attribute_multi_valued(self, multi_valued_attributes):
+        markup = '<a class="a b c">'
+        soup = self.soup(
+            markup, multi_valued_attributes=multi_valued_attributes
+        )
+        assert soup.a['class'] == ['a', 'b', 'c']
+
+    def test_fuzzed_input(self):
+        # This test centralizes in one place the various fuzz tests
+        # for Beautiful Soup created by the oss-fuzz project.
+
+        # These strings superficially resemble markup, but they
+        # generally can't be parsed into anything. The best we can
+        # hope for is that parsing these strings won't crash the
+        # parser.
+        #
+        # n.b. This markup is commented out because these fuzz tests
+        # _do_ crash the parser. However the crashes are due to bugs
+        # in html.parser, not Beautiful Soup -- otherwise I'd fix the
+        # bugs!
+
+        bad_markup = [
+            # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=28873
+            # https://github.com/guidovranken/python-library-fuzzers/blob/master/corp-html/519e5b4269a01185a0d5e76295251921da2f0700
+            # https://bugs.python.org/issue37747
+        ]
+
+
+class HTMLTreeBuilderSmokeTest(TreeBuilderSmokeTest):
+    """A generic smoke test for the HTML tree builders."""
+
+    def test_special_string_containers(self):
+        soup = self.soup(
+            "<style>Some CSS</style><script>Some JavaScript</script>"
+        )
+        assert isinstance(soup.style.string, Stylesheet)
+        assert isinstance(soup.script.string, Script)
+
+        soup = self.soup(
+            "<style><!--Some CSS--></style>"
+        )
+        assert isinstance(soup.style.string, Stylesheet)
+        # The contents of the style tag resemble an HTML comment, but
+        # it's not treated as a comment.
+        assert soup.style.string == "<!--Some CSS-->"
+        assert isinstance(soup.style.string, Stylesheet)
+
+    def test_pickle_and_unpickle_identity(self):
+        # Pickling a tree, then unpickling it, yields a tree identical
+        # to the original.
+        tree = self.soup("<a><b>foo</a>")
+        dumped = pickle.dumps(tree, 2)
+        loaded = pickle.loads(dumped)
+        assert loaded.__class__ == BeautifulSoup
+        assert loaded.decode() == tree.decode()
+
+    def assertDoctypeHandled(self, doctype_fragment):
+        """Assert that a given doctype string is handled correctly."""
+        doctype_str, soup = self._document_with_doctype(doctype_fragment)
+
+        # Make sure a Doctype object was created.
+        doctype = soup.contents[0]
+        assert doctype.__class__ == Doctype
+        assert doctype == doctype_fragment
+        assert soup.encode("utf8")[:len(doctype_str)] == doctype_str
+
+        # Make sure that the doctype was correctly associated with the
+        # parse tree and that the rest of the document parsed.
+        assert soup.p.contents[0] == 'foo'
+
+    def _document_with_doctype(self, doctype_fragment, doctype_string="DOCTYPE"):
+        """Generate and parse a document with the given doctype."""
+        doctype = '<!%s %s>' % (doctype_string, doctype_fragment)
+        markup = doctype + '\n<p>foo</p>'
+        soup = self.soup(markup)
+        return doctype.encode("utf8"), soup
+
+    def test_normal_doctypes(self):
+        """Make sure normal, everyday HTML doctypes are handled correctly."""
+        self.assertDoctypeHandled("html")
+        self.assertDoctypeHandled(
+            'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')
+
+    def test_empty_doctype(self):
+        soup = self.soup("<!DOCTYPE>")
+        doctype = soup.contents[0]
+        assert "" == doctype.strip()
+
+    def test_mixed_case_doctype(self):
+        # A lowercase or mixed-case doctype becomes a Doctype.
+        for doctype_fragment in ("doctype", "DocType"):
+            doctype_str, soup = self._document_with_doctype(
+                "html", doctype_fragment
+            )
+
+            # Make sure a Doctype object was created and that the DOCTYPE
+            # is uppercase.
+            doctype = soup.contents[0]
+            assert doctype.__class__ == Doctype
+            assert doctype == "html"
+            assert soup.encode("utf8")[:len(doctype_str)] == b"<!DOCTYPE html>"
+
+            # Make sure that the doctype was correctly associated with the
+            # parse tree and that the rest of the document parsed.
+            assert soup.p.contents[0] == 'foo'
+
+    def test_public_doctype_with_url(self):
+        doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
+        self.assertDoctypeHandled(doctype)
+
+    def test_system_doctype(self):
+        self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')
+
+    def test_namespaced_system_doctype(self):
+        # We can handle a namespaced doctype with a system ID.
+        self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')
+
+    def test_namespaced_public_doctype(self):
+        # Test a namespaced doctype with a public id.
+        self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')
+
+    def test_real_xhtml_document(self):
+        """A real XHTML document should come out more or less the same as it went in."""
+        markup = b"""<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head><title>Hello.</title></head>
+<body>Goodbye.</body>
+</html>"""
+        with warnings.catch_warnings(record=True) as w:
+            soup = self.soup(markup)
+        assert soup.encode("utf-8").replace(b"\n", b"") == markup.replace(b"\n", b"")
+
+        # No warning was issued about parsing an XML document as HTML,
+        # because XHTML is both.
+        assert w == []
+
+    def test_namespaced_html(self):
+        # When a namespaced XML document is parsed as HTML it should
+        # be treated as HTML with weird tag names.
+        markup = b"""<ns1:foo>content</ns1:foo><ns1:foo/><ns2:foo/>"""
+        with warnings.catch_warnings(record=True) as w:
+            soup = self.soup(markup)
+
+        assert 2 == len(soup.find_all("ns1:foo"))
+
+        # n.b. no "you're parsing XML as HTML" warning was given
+        # because there was no XML declaration.
+        assert [] == w
+
+    def test_detect_xml_parsed_as_html(self):
+        # A warning is issued when parsing an XML document as HTML,
+        # but basic stuff should still work.
+        markup = b"""<?xml version="1.0" encoding="utf-8"?><tag>string</tag>"""
+        with warnings.catch_warnings(record=True) as w:
+            soup = self.soup(markup)
+            assert soup.tag.string == 'string'
+        [warning] = w
+        assert isinstance(warning.message, XMLParsedAsHTMLWarning)
+        assert str(warning.message) == XMLParsedAsHTMLWarning.MESSAGE
+
+        # NOTE: the warning is not issued if the document appears to
+        # be XHTML (tested with test_real_xhtml_document in the
+        # superclass) or if there is no XML declaration (tested with
+        # test_namespaced_html in the superclass).
+
+    def test_processing_instruction(self):
+        # We test both Unicode and bytestring to verify that
+        # process_markup correctly sets processing_instruction_class
+        # even when the markup is already Unicode and there is no
+        # need to process anything.
+ markup = """""" + soup = self.soup(markup) + assert markup == soup.decode() + + markup = b"""""" + soup = self.soup(markup) + assert markup == soup.encode("utf8") + + def test_deepcopy(self): + """Make sure you can copy the tree builder. + + This is important because the builder is part of a + BeautifulSoup object, and we want to be able to copy that. + """ + copy.deepcopy(self.default_builder) + + def test_p_tag_is_never_empty_element(self): + """A

+        """A <p> tag is never designated as an empty-element tag.
+
+        Even if the markup shows it as an empty-element tag, it
+        shouldn't be presented that way.
+        """
+        soup = self.soup("<p/>")
+        assert not soup.p.is_empty_element
+        assert str(soup.p) == "<p></p>"
+
+    def test_unclosed_tags_get_closed(self):
+        """A tag that's not closed by the end of the document should be closed.
+
+        This applies to all tags except empty-element tags.
+        """
+        self.assert_soup("<p>", "<p></p>")
+        self.assert_soup("<b>", "<b></b>")
+
+        self.assert_soup("<br>", "<br/>")
+
+    def test_br_is_always_empty_element_tag(self):
+        """A <br> tag is designated as an empty-element tag.
+
+        Some parsers treat <br></br> as one <br/> tag, some parsers as
+        two tags, but it should always be an empty-element tag.
+        """
+        soup = self.soup("<br></br>")
+        assert soup.br.is_empty_element
+        assert str(soup.br) == "<br/>"
+
+    def test_nested_formatting_elements(self):
+        self.assert_soup("<em><em></em></em>")
+
+    def test_double_head(self):
+        html = '''<!DOCTYPE html>
+<html>
+<head>
+<title>Ordinary HEAD element test</title>
+</head>
+<script type="text/javascript">
+alert("Help!")
+</script>
+<body>
+Hello, world!
+</body>
+</html>
+'''
+        soup = self.soup(html)
+        assert "text/javascript" == soup.find('script')['type']
+
+    def test_comment(self):
+        # Comments are represented as Comment objects.
+        markup = "<p>foo<!--foobar-->baz</p>"
+        self.assert_soup(markup)
+
+        soup = self.soup(markup)
+        comment = soup.find(string="foobar")
+        assert comment.__class__ == Comment
+
+        # The comment is properly integrated into the tree.
+        foo = soup.find(string="foo")
+        assert comment == foo.next_element
+        baz = soup.find(string="baz")
+        assert comment == baz.previous_element
+
+    def test_preserved_whitespace_in_pre_and_textarea(self):
+        """Whitespace must be preserved in <pre> and <textarea> tags,
+        even if that would mean not prettifying the markup.
+        """
+        pre_markup = "<pre>   </pre>"
+        textarea_markup = "<textarea> woo\nwoo  </textarea>"
+        self.assert_soup(pre_markup)
+        self.assert_soup(textarea_markup)
+
+        soup = self.soup(pre_markup)
+        assert soup.pre.prettify() == pre_markup
+
+        soup = self.soup(textarea_markup)
+        assert soup.textarea.prettify() == textarea_markup
+
+        soup = self.soup("")
+        assert soup.textarea.prettify() == ""
+
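+    # The prettify() assertions above rely on <pre> and <textarea> being
+    # whitespace-preserving tags; illustratively (not upstream code):
+    #
+    #     soup = self.soup("<pre> a  b </pre>")
+    #     assert "<pre> a  b </pre>" in soup.pre.prettify()
+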
+    def test_nested_inline_elements(self):
+        """Inline elements can be nested indefinitely."""
+        b_tag = "Inside a B tag"
+        self.assert_soup(b_tag)
+
+        nested_b_tag = "

A nested tag

" + self.assert_soup(nested_b_tag) + + double_nested_b_tag = "

A doubly nested tag

" + self.assert_soup(nested_b_tag) + + def test_nested_block_level_elements(self): + """Block elements can be nested.""" + soup = self.soup('

Foo

') + blockquote = soup.blockquote + assert blockquote.p.b.string == 'Foo' + assert blockquote.b.string == 'Foo' + + def test_correctly_nested_tables(self): + """One table can go inside another one.""" + markup = ('' + '' + "') + + self.assert_soup( + markup, + '
Here's another table:" + '' + '' + '
foo
Here\'s another table:' + '
foo
' + '
') + + self.assert_soup( + "" + "" + "
Foo
Bar
Baz
") + + def test_multivalued_attribute_with_whitespace(self): + # Whitespace separating the values of a multi-valued attribute + # should be ignored. + + markup = '
' + soup = self.soup(markup) + assert ['foo', 'bar'] == soup.div['class'] + + # If you search by the literal name of the class it's like the whitespace + # wasn't there. + assert soup.div == soup.find('div', class_="foo bar") + + def test_deeply_nested_multivalued_attribute(self): + # html5lib can set the attributes of the same tag many times + # as it rearranges the tree. This has caused problems with + # multivalued attributes. + markup = '
' + soup = self.soup(markup) + assert ["css"] == soup.div.div['class'] + + def test_multivalued_attribute_on_html(self): + # html5lib uses a different API to set the attributes ot the + # tag. This has caused problems with multivalued + # attributes. + markup = '' + soup = self.soup(markup) + assert ["a", "b"] == soup.html['class'] + + def test_angle_brackets_in_attribute_values_are_escaped(self): + self.assert_soup('', '') + + def test_strings_resembling_character_entity_references(self): + # "&T" and "&p" look like incomplete character entities, but they are + # not. + self.assert_soup( + "

• AT&T is in the s&p 500

", + "

\u2022 AT&T is in the s&p 500

" + ) + + def test_apos_entity(self): + self.assert_soup( + "

+            "<p>Bob&apos;s Bar</p>",
+            "<p>Bob's Bar</p>",
+        )
+
+    def test_entities_in_foreign_document_encoding(self):
+        # &#147; and &#148; are invalid numeric entities referencing
+        # Windows-1252 characters. &#45; references a character common
+        # to Windows-1252 and Unicode, and &#9731; references a
+        # character only found in Unicode.
+        #
+        # All of these entities should be converted to Unicode
+        # characters.
+        markup = "<p>&#147;Hello&#148; -&#9731;</p>"
+        soup = self.soup(markup)
+        assert "“Hello” -☃" == soup.p.string
+
+    def test_entities_in_attributes_converted_to_unicode(self):
+        expect = '<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
+        self.assert_soup('<p id="pi&#241;ata"></p>', expect)
+        self.assert_soup('<p id="pi&#xf1;ata"></p>', expect)
+        self.assert_soup('<p id="pi&#Xf1;ata"></p>', expect)
+        self.assert_soup('<p id="pi&ntilde;ata"></p>', expect)
+
+    def test_entities_in_text_converted_to_unicode(self):
+        expect = '<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
+        self.assert_soup("<p>pi&#241;ata</p>", expect)
+        self.assert_soup("<p>pi&#xf1;ata</p>", expect)
+        self.assert_soup("<p>pi&#Xf1;ata</p>", expect)
+        self.assert_soup("<p>pi&ntilde;ata</p>", expect)
+
+    def test_quot_entity_converted_to_quotation_mark(self):
+        self.assert_soup("<p>I said &quot;good day!&quot;</p>",
+                         '<p>I said "good day!"</p>')
+
+    def test_out_of_range_entity(self):
+        expect = "\N{REPLACEMENT CHARACTER}"
+        self.assert_soup("&#10000000000000;", expect)
+        self.assert_soup("&#x10000000000000;", expect)
+        self.assert_soup("&#1000000000;", expect)
+
+    def test_multipart_strings(self):
+        "Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
+        soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
+        assert "p" == soup.h2.string.next_element.name
+        assert "p" == soup.p.name
+        self.assertConnectedness(soup)
+
+    def test_empty_element_tags(self):
+        """Verify consistent handling of empty-element tags,
+        no matter how they come in through the markup.
+        """
+        self.assert_soup('<br/><br/><br/>', "<br/><br/><br/>")
+        self.assert_soup('<br /><br /><br />', "<br/><br/><br/>")
+
+    def test_head_tag_between_head_and_body(self):
+        "Prevent recurrence of a bug in the html5lib treebuilder."
+        content = """<html><head></head>
+  <link></link>
+  <body>foo</body>
+</html>
+"""
+        soup = self.soup(content)
+        assert soup.html.body is not None
+        self.assertConnectedness(soup)
+
+    def test_multiple_copies_of_a_tag(self):
+        "Prevent recurrence of a bug in the html5lib treebuilder."
+        content = """<!DOCTYPE html>
+<html>
+ <body>
+  <article id="a">
+   <div><a href="1"></div>
+   <footer>
+    <a href="2"></a>
+   </footer>
+  </article>
+ </body>
+</html>
+"""
+        soup = self.soup(content)
+        self.assertConnectedness(soup.article)
+
+    def test_basic_namespaces(self):
+        """Parsers don't need to *understand* namespaces, but at the
+        very least they should not choke on namespaces or lose
+        data."""
+        markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
+        soup = self.soup(markup)
+        assert markup == soup.encode()
+        html = soup.html
+        assert 'http://www.w3.org/1999/xhtml' == soup.html['xmlns']
+        assert 'http://www.w3.org/1998/Math/MathML' == soup.html['xmlns:mathml']
+        assert 'http://www.w3.org/2000/svg' == soup.html['xmlns:svg']
+
+    def test_multivalued_attribute_value_becomes_list(self):
+        markup = b'<a class="foo bar">'
+        soup = self.soup(markup)
+        assert ['foo', 'bar'] == soup.a['class']
+
+    #
+    # Generally speaking, tests below this point are more tests of
+    # Beautiful Soup than tests of the tree builders. But parsers are
+    # weird, so we run these tests separately for every tree builder
+    # to detect any differences between them.
+    #
+
+    def test_can_parse_unicode_document(self):
+        # A seemingly innocuous document... but it's in Unicode! And
+        # it contains characters that can't be represented in the
+        # encoding found in the declaration! The horror!
+        markup = '<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
+        soup = self.soup(markup)
+        assert 'Sacr\xe9 bleu!' == soup.body.string
+
+    def test_soupstrainer(self):
+        """Parsers should be able to work with SoupStrainers."""
+        strainer = SoupStrainer("b")
+        soup = self.soup("A <b>bold</b> statement",
+                         parse_only=strainer)
+        assert soup.decode() == "<b>bold</b>"
+
+    def test_single_quote_attribute_values_become_double_quotes(self):
+        self.assert_soup("<foo attr='bar'></foo>",
+                         '<foo attr="bar"></foo>')
+
+    def test_attribute_values_with_nested_quotes_are_left_alone(self):
+        text = """<foo attr='bar "brawls" happen'>a</foo>"""
+        self.assert_soup(text)
+
+    def test_attribute_values_with_double_nested_quotes_get_quoted(self):
+        text = """<foo attr='bar "brawls" happen'>a</foo>"""
+        soup = self.soup(text)
+        soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
+        self.assert_soup(
+            soup.foo.decode(),
+            """<foo attr="Brawls happen at &quot;Bob\'s Bar&quot;">a</foo>""")
+
+    def test_ampersand_in_attribute_value_gets_escaped(self):
+        self.assert_soup('<this is="really messed up & stuff"></this>',
+                         '<this is="really messed up &amp; stuff"></this>')
+        self.assert_soup(
+            '<a href="http://example.org?a=1&b=2;3">foo</a>',
+            '<a href="http://example.org?a=1&amp;b=2;3">foo</a>')
+
+    def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
+        self.assert_soup('<a href="http://example.org?a=1&amp;b=2;3"></a>')
+
+    def test_entities_in_strings_converted_during_parsing(self):
+        # Both XML and HTML entities are converted to Unicode characters
+        # during parsing.
+        text = "<p>&lt;&lt;sacr&eacute;&#32;bleu!&gt;&gt;</p>"
+        expected = "<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>"
+        self.assert_soup(text, expected)
+
+    def test_smart_quotes_converted_on_the_way_in(self):
+        # Microsoft smart quotes are converted to Unicode characters during
+        # parsing.
+        quote = b"<p>\x91Foo\x92</p>"
+        soup = self.soup(quote)
+        assert soup.p.string == "\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}"
+
+    def test_non_breaking_spaces_converted_on_the_way_in(self):
+        soup = self.soup("<a>&nbsp;&nbsp;</a>")
+        assert soup.a.string == "\N{NO-BREAK SPACE}" * 2
+
+    def test_entities_converted_on_the_way_out(self):
+        text = "<p>&lt;&lt;sacr&eacute;&#32;bleu!&gt;&gt;</p>"
+        expected = "<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>".encode("utf-8")
+        soup = self.soup(text)
+        assert soup.p.encode("utf-8") == expected
+
+    def test_real_iso_latin_document(self):
+        # Smoke test of interrelated functionality, using an
+        # easy-to-understand document.
+
+        # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
+        unicode_html = '<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'
+
+        # That's because we're going to encode it into ISO-Latin-1, and use
+        # that to test.
+        iso_latin_html = unicode_html.encode("iso-8859-1")
+
+        # Parse the ISO-Latin-1 HTML.
+        soup = self.soup(iso_latin_html)
+        # Encode it to UTF-8.
+        result = soup.encode("utf-8")
+
+        # What do we expect the result to look like? Well, it would
+        # look like unicode_html, except that the META tag would say
+        # UTF-8 instead of ISO-Latin-1.
+        expected = unicode_html.replace("ISO-Latin-1", "utf-8")
+
+        # And, of course, it would be in UTF-8, not Unicode.
+        expected = expected.encode("utf-8")
+
+        # Ta-da!
+        assert result == expected
+
+    def test_real_shift_jis_document(self):
+        # Smoke test to make sure the parser can handle a document in
+        # Shift-JIS encoding, without choking.
+        shift_jis_html = (
+            b'<html><head></head><body><pre>'
+            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
+            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
+            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
+            b'</pre></body></html>')
+        unicode_html = shift_jis_html.decode("shift-jis")
+        soup = self.soup(unicode_html)
+
+        # Make sure the parse tree is correctly encoded to various
+        # encodings.
+        assert soup.encode("utf-8") == unicode_html.encode("utf-8")
+        assert soup.encode("euc_jp") == unicode_html.encode("euc_jp")
+
+    def test_real_hebrew_document(self):
+        # A real-world test to make sure we can convert ISO-8859-8 (a
+        # Hebrew encoding) to UTF-8.
+        hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
+        soup = self.soup(
+            hebrew_document, from_encoding="iso8859-8")
+        # Some tree builders call it iso8859-8, others call it iso-8859-8.
+        # That's not a difference we really care about.
+        assert soup.original_encoding in ('iso8859-8', 'iso-8859-8')
+        assert soup.encode('utf-8') == (
+            hebrew_document.decode("iso8859-8").encode("utf-8")
+        )
+
+    def test_meta_tag_reflects_current_encoding(self):
+        # Here's the <meta> tag saying that a document is
+        # encoded in Shift-JIS.
+        meta_tag = ('<meta content="text/html; charset=x-sjis" '
+                    'http-equiv="Content-type"/>')
+
+        # Here's a document incorporating that meta tag.
+        shift_jis_html = (
+            '<html><head>\n%s\n'
+            '<meta http-equiv="Content-language" content="ja"/>'
+            '</head><body>Shift-JIS markup goes here.') % meta_tag
+        soup = self.soup(shift_jis_html)
+
+        # Parse the document, and the charset is seemingly unaffected.
+        parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
+        content = parsed_meta['content']
+        assert 'text/html; charset=x-sjis' == content
+
+        # But that value is actually a ContentMetaAttributeValue object.
+        assert isinstance(content, ContentMetaAttributeValue)
+
+        # And it will take on a value that reflects its current
+        # encoding.
+        assert 'text/html; charset=utf8' == content.encode("utf8")
+
+        # For the rest of the story, see TestSubstitutions in
+        # test_tree.py.
+
+    def test_html5_style_meta_tag_reflects_current_encoding(self):
+        # Here's the <meta> tag saying that a document is
+        # encoded in Shift-JIS.
+        meta_tag = '<meta id="encoding" charset="x-sjis"/>'
+
+        # Here's a document incorporating that meta tag.
+        shift_jis_html = (
+            '<html><head>\n%s\n'
+            '<meta http-equiv="Content-language" content="ja"/>'
+            '</head><body>Shift-JIS markup goes here.') % meta_tag
+        soup = self.soup(shift_jis_html)
+
+        # Parse the document, and the charset is seemingly unaffected.
+        parsed_meta = soup.find('meta', id="encoding")
+        charset = parsed_meta['charset']
+        assert 'x-sjis' == charset
+
+        # But that value is actually a CharsetMetaAttributeValue object.
+        assert isinstance(charset, CharsetMetaAttributeValue)
+
+        # And it will take on a value that reflects its current
+        # encoding.
+        assert 'utf8' == charset.encode("utf8")
+
+    def test_python_specific_encodings_not_used_in_charset(self):
+        # You can encode an HTML document using a Python-specific
+        # encoding, but that encoding won't be mentioned _inside_ the
+        # resulting document. Instead, the document will appear to
+        # have no encoding.
+        for markup in [
+            b'<meta charset="utf8"></head>',
+            b'<meta id="encoding" charset="utf-8"/>',
+        ]:
+            soup = self.soup(markup)
+            for encoding in PYTHON_SPECIFIC_ENCODINGS:
+                if encoding in (
+                    'idna', 'mbcs', 'oem', 'undefined',
+                    'string_escape', 'string-escape'
+                ):
+                    # For one reason or another, these will raise an
+                    # exception if we actually try to use them, so don't
+                    # bother.
+                    continue
+                encoded = soup.encode(encoding)
+                assert b'meta charset=""' in encoded
+                assert encoding.encode("ascii") not in encoded
+
+    def test_tag_with_no_attributes_can_have_attributes_added(self):
+        data = self.soup("<a>text</a>")
+        data.a['foo'] = 'bar'
+        assert '<a foo="bar">text</a>' == data.a.decode()
+
+    def test_closing_tag_with_no_opening_tag(self):
+        # Without BeautifulSoup.open_tag_counter, the </span> tag will
+        # cause _popToTag to be called over and over again as we look
+        # for a <span> tag that wasn't there. The result is that 'text2'
+        # will show up outside the body of the document.
+        soup = self.soup("<body><div><p>text1</p></span>text2</div></body>")
+        assert "<body><div><p>text1</p>text2</div></body>" == soup.body.decode()
+
+    def test_worst_case(self):
+        """Test the worst case (currently) for linking issues."""
+        soup = self.soup(BAD_DOCUMENT)
+        self.linkage_validator(soup)
+
+
+class XMLTreeBuilderSmokeTest(TreeBuilderSmokeTest):
+
+    def test_pickle_and_unpickle_identity(self):
+        # Pickling a tree, then unpickling it, yields a tree identical
+        # to the original.
+        tree = self.soup("<a><b>foo</a>")
+        dumped = pickle.dumps(tree, 2)
+        loaded = pickle.loads(dumped)
+        assert loaded.__class__ == BeautifulSoup
+        assert loaded.decode() == tree.decode()
+
+    def test_docstring_generated(self):
+        soup = self.soup("<root/>")
+        assert soup.encode() == b'<?xml version="1.0" encoding="utf-8"?>\n<root/>'
+
+    def test_xml_declaration(self):
+        markup = b"""<?xml version="1.0" encoding="utf8"?>\n<foo/>"""
+        soup = self.soup(markup)
+        assert markup == soup.encode("utf8")
+
+    def test_python_specific_encodings_not_used_in_xml_declaration(self):
+        # You can encode an XML document using a Python-specific
+        # encoding, but that encoding won't be mentioned _inside_ the
+        # resulting document.
+        markup = b"""<?xml version="1.0"?>\n<foo/>"""
+        soup = self.soup(markup)
+        for encoding in PYTHON_SPECIFIC_ENCODINGS:
+            if encoding in (
+                'idna', 'mbcs', 'oem', 'undefined',
+                'string_escape', 'string-escape'
+            ):
+                # For one reason or another, these will raise an
+                # exception if we actually try to use them, so don't
+                # bother.
+                continue
+            encoded = soup.encode(encoding)
+            assert b'<?xml version="1.0"?>' in encoded
+            assert encoding.encode("ascii") not in encoded
+
+    def test_processing_instruction(self):
+        markup = b"""<?xml version="1.0" encoding="utf8"?>\n<?PITarget PIContent?>"""
+        soup = self.soup(markup)
+        assert markup == soup.encode("utf8")
+
+    def test_real_xhtml_document(self):
+        """A real XHTML document should come out *exactly* the same as it went in."""
+        markup = b"""<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head><title>Hello.</title></head>
+<body>Goodbye.</body>
+</html>"""
+        soup = self.soup(markup)
+        assert soup.encode("utf-8") == markup
+
+    def test_nested_namespaces(self):
+        doc = b"""<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
+<parent xmlns="http://ns1/">
+<child xmlns="http://ns2/" xmlns:ns3="http://ns3/">
+<grandchild ns3:attr="value" xmlns="http://ns4/"/>
+</child>
+</parent>"""
+        soup = self.soup(doc)
+        assert doc == soup.encode()
+
+    def test_formatter_processes_script_tag_for_xml_documents(self):
+        doc = """
+  <script type="text/javascript">
+  </script>
+"""
+        soup = BeautifulSoup(doc, "lxml-xml")
+        # lxml would have stripped this while parsing, but we can add
+        # it later.
+        soup.script.string = 'console.log("< < hey > > ");'
+        encoded = soup.encode()
+        assert b"&lt; &lt; hey &gt; &gt;" in encoded
+
+    def test_can_parse_unicode_document(self):
+        markup = '<?xml version="1.0" encoding="euc-jp"?><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
+        soup = self.soup(markup)
+        assert 'Sacr\xe9 bleu!' == soup.root.string
+
+    def test_can_parse_unicode_document_begining_with_bom(self):
+        markup = '\N{BYTE ORDER MARK}<?xml version="1.0" encoding="euc-jp"?><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
+        soup = self.soup(markup)
+        assert 'Sacr\xe9 bleu!' == soup.root.string
+
+    def test_popping_namespaced_tag(self):
+        markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
+        soup = self.soup(markup)
+        assert str(soup.rss) == markup
+
+    def test_docstring_includes_correct_encoding(self):
+        soup = self.soup("<root/>")
+        assert soup.encode("latin1") == b'<?xml version="1.0" encoding="latin1"?>\n<root/>'
+
+    def test_large_xml_document(self):
+        """A large XML document should come out the same as it went in."""
+        markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
+                  + b'0' * (2**12)
+                  + b'</root>')
+        soup = self.soup(markup)
+        assert soup.encode("utf-8") == markup
+
+    def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
+        self.assert_soup("<p>", "<p/>")
+        self.assert_soup("<p>foo</p>")
+
+    def test_namespaces_are_preserved(self):
+        markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
+        soup = self.soup(markup)
+        root = soup.root
+        assert "http://example.com/" == root['xmlns:a']
+        assert "http://example.net/" == root['xmlns:b']
+
+    def test_closing_namespaced_tag(self):
+        markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
+        soup = self.soup(markup)
+        assert str(soup.p) == markup
+
+    def test_namespaced_attributes(self):
+        markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
+        soup = self.soup(markup)
+        assert str(soup.foo) == markup
+
+    def test_namespaced_attributes_xml_namespace(self):
+        markup = '<foo xml:lang="fr">bar</foo>'
+        soup = self.soup(markup)
+        assert str(soup.foo) == markup
+
+    def test_find_by_prefixed_name(self):
+        doc = """<?xml version="1.0" encoding="utf-8"?>
+<Document xmlns="http://example.com/ns0"
+          xmlns:ns1="http://example.com/ns1"
+          xmlns:ns2="http://example.com/ns2">
+  <ns1:tag>foo</ns1:tag>
+  <ns1:tag>bar</ns1:tag>
+  <ns2:tag key="value">baz</ns2:tag>
+</Document>
+"""
+        soup = self.soup(doc)
+
+        # There are three <tag> tags.
+        assert 3 == len(soup.find_all('tag'))
+
+        # But two of them are ns1:tag and one of them is ns2:tag.
+        assert 2 == len(soup.find_all('ns1:tag'))
+        assert 1 == len(soup.find_all('ns2:tag'))
+
+        assert 1 == len(soup.find_all('ns2:tag', key='value'))
+        assert 3 == len(soup.find_all(['ns1:tag', 'ns2:tag']))
+
+    def test_copy_tag_preserves_namespace(self):
+        xml = """<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<w:document xmlns:w="http://example.com/ns0"/>"""
+
+        soup = self.soup(xml)
+        tag = soup.document
+        duplicate = copy.copy(tag)
+
+        # The two tags have the same namespace prefix.
+        assert tag.prefix == duplicate.prefix
+
+    def test_worst_case(self):
+        """Test the worst case (currently) for linking issues."""
+        soup = self.soup(BAD_DOCUMENT)
+        self.linkage_validator(soup)
+
+
+class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
+    """Smoke test for a tree builder that supports HTML5."""
+
+    def test_real_xhtml_document(self):
+        # Since XHTML is not HTML5, HTML5 parsers are not tested to handle
+        # XHTML documents in any particular way.
+        pass
+
+    def test_html_tags_have_namespace(self):
+        markup = "<a>"
+        soup = self.soup(markup)
+        assert "http://www.w3.org/1999/xhtml" == soup.a.namespace
+
+    def test_svg_tags_have_namespace(self):
+        markup = '<svg><circle/></svg>'
+        soup = self.soup(markup)
+        namespace = "http://www.w3.org/2000/svg"
+        assert namespace == soup.svg.namespace
+        assert namespace == soup.circle.namespace
+
+    def test_mathml_tags_have_namespace(self):
+        markup = '<math><msqrt>5</msqrt></math>'
+        soup = self.soup(markup)
+        namespace = 'http://www.w3.org/1998/Math/MathML'
+        assert namespace == soup.math.namespace
+        assert namespace == soup.msqrt.namespace
+
+    def test_xml_declaration_becomes_comment(self):
+        markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
+        soup = self.soup(markup)
+        assert isinstance(soup.contents[0], Comment)
+        assert soup.contents[0] == '?xml version="1.0" encoding="utf-8"?'
+ assert "html" == soup.contents[0].next_element.name + +def skipIf(condition, reason): + def nothing(test, *args, **kwargs): + return None + + def decorator(test_item): + if condition: + return nothing + else: + return test_item + + return decorator diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/__init__.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..8ca2304 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_builder.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_builder.cpython-310.pyc new file mode 100644 index 0000000..45b78ff Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_builder.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_builder_registry.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_builder_registry.cpython-310.pyc new file mode 100644 index 0000000..b9f6532 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_builder_registry.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_dammit.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_dammit.cpython-310.pyc new file mode 100644 index 0000000..76a0978 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_dammit.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_docs.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_docs.cpython-310.pyc new file mode 100644 index 0000000..c2a1426 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_docs.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_element.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_element.cpython-310.pyc new file mode 100644 index 0000000..0faaf05 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_element.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_formatter.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_formatter.cpython-310.pyc new file mode 100644 index 0000000..c004bb3 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_formatter.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_html5lib.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_html5lib.cpython-310.pyc new file mode 100644 index 0000000..ef7a244 Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_html5lib.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_htmlparser.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_htmlparser.cpython-310.pyc new file mode 100644 index 0000000..af364bf Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_htmlparser.cpython-310.pyc differ diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_lxml.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_lxml.cpython-310.pyc new file mode 100644 index 0000000..b40279c Binary files /dev/null and 
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_navigablestring.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_navigablestring.cpython-310.pyc
new file mode 100644
index 0000000..b7e2f15
Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_navigablestring.cpython-310.pyc differ
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_pageelement.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_pageelement.cpython-310.pyc
new file mode 100644
index 0000000..d78dca3
Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_pageelement.cpython-310.pyc differ
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_soup.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_soup.cpython-310.pyc
new file mode 100644
index 0000000..f553723
Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_soup.cpython-310.pyc differ
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_tag.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_tag.cpython-310.pyc
new file mode 100644
index 0000000..b1af6f8
Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_tag.cpython-310.pyc differ
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_tree.cpython-310.pyc b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_tree.cpython-310.pyc
new file mode 100644
index 0000000..e5687d3
Binary files /dev/null and b/sbsheriff/Lib/site-packages/bs4/tests/__pycache__/test_tree.cpython-310.pyc differ
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_builder.py b/sbsheriff/Lib/site-packages/bs4/tests/test_builder.py
new file mode 100644
index 0000000..7537071
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/tests/test_builder.py
@@ -0,0 +1,29 @@
+import pytest
+from unittest.mock import patch
+from bs4.builder import DetectsXMLParsedAsHTML
+
+class TestDetectsXMLParsedAsHTML(object):
+
+    @pytest.mark.parametrize(
+        "markup,looks_like_xml",
+        [("No xml declaration", False),
+         ("<html>obviously HTML</html>", False),
+         ("<?xml ?><html>Actually XHTML</html>", False),
+         ("<?xml ?  > < html>Tricky XHTML</html>", False),
+         ("<?xml ?><no>htmlelement</no>", True),
+        ]
+    )
+    def test_warn_if_markup_looks_like_xml(self, markup, looks_like_xml):
+        # Test of our ability to guess at whether markup looks XML-ish
+        # _and_ not HTML-ish.
+        with patch('bs4.builder.DetectsXMLParsedAsHTML._warn') as mock:
+            for data in markup, markup.encode('utf8'):
+                result = DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(
+                    data
+                )
+                assert result == looks_like_xml
+                if looks_like_xml:
+                    assert mock.called
+                else:
+                    assert not mock.called
+                mock.reset_mock()
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_builder_registry.py b/sbsheriff/Lib/site-packages/bs4/tests/test_builder_registry.py
new file mode 100644
index 0000000..5fa874c
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/tests/test_builder_registry.py
@@ -0,0 +1,136 @@
+"""Tests of the builder registry."""
+
+import pytest
+import warnings
+
+from bs4 import BeautifulSoup
+from bs4.builder import (
+    builder_registry as registry,
+    HTMLParserTreeBuilder,
+    TreeBuilderRegistry,
+)
+
+try:
+    from bs4.builder import HTML5TreeBuilder
+    HTML5LIB_PRESENT = True
+except ImportError:
+    HTML5LIB_PRESENT = False
+
+try:
+    from bs4.builder import (
+        LXMLTreeBuilderForXML,
+        LXMLTreeBuilder,
+    )
+    LXML_PRESENT = True
+except ImportError:
+    LXML_PRESENT = False
+
+
+class TestBuiltInRegistry(object):
+    """Test the built-in registry with the default builders registered."""
+
+    def test_combination(self):
+        assert registry.lookup('strict', 'html') == HTMLParserTreeBuilder
+        if LXML_PRESENT:
+            assert registry.lookup('fast', 'html') == LXMLTreeBuilder
+            assert registry.lookup('permissive', 'xml') == LXMLTreeBuilderForXML
+        if HTML5LIB_PRESENT:
+            assert registry.lookup('html5lib', 'html') == HTML5TreeBuilder
+
+    def test_lookup_by_markup_type(self):
+        if LXML_PRESENT:
+            assert registry.lookup('html') == LXMLTreeBuilder
+            assert registry.lookup('xml') == LXMLTreeBuilderForXML
+        else:
+            assert registry.lookup('xml') == None
+            if HTML5LIB_PRESENT:
+                assert registry.lookup('html') == HTML5TreeBuilder
+            else:
+                assert registry.lookup('html') == HTMLParserTreeBuilder
+
+    def test_named_library(self):
+        if LXML_PRESENT:
+            assert registry.lookup('lxml', 'xml') == LXMLTreeBuilderForXML
+            assert registry.lookup('lxml', 'html') == LXMLTreeBuilder
+        if HTML5LIB_PRESENT:
+            assert registry.lookup('html5lib') == HTML5TreeBuilder
+
+        assert registry.lookup('html.parser') == HTMLParserTreeBuilder
+
+    def test_beautifulsoup_constructor_does_lookup(self):
+
+        with warnings.catch_warnings(record=True) as w:
+            # This will create a warning about not explicitly
+            # specifying a parser, but we'll ignore it.
+
+            # You can pass in a string.
+            BeautifulSoup("", features="html")
+            # Or a list of strings.
+            BeautifulSoup("", features=["html", "fast"])
+            pass
+
+        # You'll get an exception if BS can't find an appropriate
+        # builder.
+        with pytest.raises(ValueError):
+            BeautifulSoup("", features="no-such-feature")
+
+
+class TestRegistry(object):
+    """Test the TreeBuilderRegistry class in general."""
+
+    def setup_method(self):
+        self.registry = TreeBuilderRegistry()
+
+    def builder_for_features(self, *feature_list):
+        cls = type('Builder_' + '_'.join(feature_list),
+                   (object,), {'features': feature_list})
+
+        self.registry.register(cls)
+        return cls
+
+    def test_register_with_no_features(self):
+        builder = self.builder_for_features()
+
+        # Since the builder advertises no features, you can't find it
+        # by looking up features.
+        assert self.registry.lookup('foo') is None
+
+        # But you can find it by doing a lookup with no features, if
+        # this happens to be the only registered builder.
+        assert self.registry.lookup() == builder
+
+    def test_register_with_features_makes_lookup_succeed(self):
+        builder = self.builder_for_features('foo', 'bar')
+        assert self.registry.lookup('foo') is builder
+        assert self.registry.lookup('bar') is builder
+
+    def test_lookup_fails_when_no_builder_implements_feature(self):
+        builder = self.builder_for_features('foo', 'bar')
+        assert self.registry.lookup('baz') is None
+
+    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
+        builder1 = self.builder_for_features('foo')
+        builder2 = self.builder_for_features('bar')
+        assert self.registry.lookup() == builder2
+
+    def test_lookup_fails_when_no_tree_builders_registered(self):
+        assert self.registry.lookup() is None
+
+    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
+        has_one = self.builder_for_features('foo')
+        has_the_other = self.builder_for_features('bar')
+        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
+        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
+        lacks_one = self.builder_for_features('bar')
+        has_the_other = self.builder_for_features('foo')
+
+        # There are two builders featuring 'foo' and 'bar', but
+        # the one that also features 'quux' was registered later.
+        assert self.registry.lookup('foo', 'bar') == has_both_late
+
+        # There is only one builder featuring 'foo', 'bar', and 'baz'.
+        assert self.registry.lookup('foo', 'bar', 'baz') == has_both_early
+
+    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
+        builder1 = self.builder_for_features('foo', 'bar')
+        builder2 = self.builder_for_features('foo', 'baz')
+        assert self.registry.lookup('bar', 'baz') is None
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_dammit.py b/sbsheriff/Lib/site-packages/bs4/tests/test_dammit.py
new file mode 100644
index 0000000..9971234
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/tests/test_dammit.py
@@ -0,0 +1,371 @@
+# encoding: utf-8
+import pytest
+import logging
+import bs4
+from bs4 import BeautifulSoup
+from bs4.dammit import (
+    EntitySubstitution,
+    EncodingDetector,
+    UnicodeDammit,
+)
+
+
+class TestUnicodeDammit(object):
+    """Standalone tests of UnicodeDammit."""
+
+    def test_unicode_input(self):
+        markup = "I'm already Unicode! \N{SNOWMAN}"
+        dammit = UnicodeDammit(markup)
+        assert dammit.unicode_markup == markup
+
+    def test_smart_quotes_to_unicode(self):
+        markup = b"<foo>\x91\x92\x93\x94</foo>"
+        dammit = UnicodeDammit(markup)
+        assert dammit.unicode_markup == "<foo>\u2018\u2019\u201c\u201d</foo>"
+
+    def test_smart_quotes_to_xml_entities(self):
+        markup = b"<foo>\x91\x92\x93\x94</foo>"
+        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
+        assert dammit.unicode_markup == "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>"
+
+    def test_smart_quotes_to_html_entities(self):
+        markup = b"<foo>\x91\x92\x93\x94</foo>"
+        dammit = UnicodeDammit(markup, smart_quotes_to="html")
+        assert dammit.unicode_markup == "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>"
+
+    def test_smart_quotes_to_ascii(self):
+        markup = b"<foo>\x91\x92\x93\x94</foo>"
+        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
+        assert dammit.unicode_markup == """<foo>''""</foo>"""
+
+    def test_detect_utf8(self):
+        utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83"
+        dammit = UnicodeDammit(utf8)
+        assert dammit.original_encoding.lower() == 'utf-8'
diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_dammit.py b/sbsheriff/Lib/site-packages/bs4/tests/test_dammit.py
new file mode 100644
index 0000000..9971234
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/tests/test_dammit.py
@@ -0,0 +1,371 @@
+# encoding: utf-8
+import pytest
+import logging
+import bs4
+from bs4 import BeautifulSoup
+from bs4.dammit import (
+    EntitySubstitution,
+    EncodingDetector,
+    UnicodeDammit,
+)
+
+class TestUnicodeDammit(object):
+    """Standalone tests of UnicodeDammit."""
+
+    def test_unicode_input(self):
+        markup = "I'm already Unicode! \N{SNOWMAN}"
+        dammit = UnicodeDammit(markup)
+        assert dammit.unicode_markup == markup
+
+    def test_smart_quotes_to_unicode(self):
+        markup = b"\x91\x92\x93\x94"
+        dammit = UnicodeDammit(markup)
+        assert dammit.unicode_markup == "\u2018\u2019\u201c\u201d"
+
+    def test_smart_quotes_to_xml_entities(self):
+        markup = b"\x91\x92\x93\x94"
+        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
+        assert dammit.unicode_markup == "&#x2018;&#x2019;&#x201C;&#x201D;"
+
+    def test_smart_quotes_to_html_entities(self):
+        markup = b"\x91\x92\x93\x94"
+        dammit = UnicodeDammit(markup, smart_quotes_to="html")
+        assert dammit.unicode_markup == "&lsquo;&rsquo;&ldquo;&rdquo;"
+
+    def test_smart_quotes_to_ascii(self):
+        markup = b"\x91\x92\x93\x94"
+        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
+        assert dammit.unicode_markup == "''\"\""
+
+    def test_detect_utf8(self):
+        utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83"
+        dammit = UnicodeDammit(utf8)
+        assert dammit.original_encoding.lower() == 'utf-8'
+        assert dammit.unicode_markup == 'Sacr\xe9 bleu! \N{SNOWMAN}'
+
+    def test_convert_hebrew(self):
+        hebrew = b"\xed\xe5\xec\xf9"
+        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
+        assert dammit.original_encoding.lower() == 'iso-8859-8'
+        assert dammit.unicode_markup == '\u05dd\u05d5\u05dc\u05e9'
+
+    def test_dont_see_smart_quotes_where_there_are_none(self):
+        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
+        dammit = UnicodeDammit(utf_8)
+        assert dammit.original_encoding.lower() == 'utf-8'
+        assert dammit.unicode_markup.encode("utf-8") == utf_8
+
+    def test_ignore_inappropriate_codecs(self):
+        utf8_data = "Räksmörgås".encode("utf-8")
+        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
+        assert dammit.original_encoding.lower() == 'utf-8'
+
+    def test_ignore_invalid_codecs(self):
+        utf8_data = "Räksmörgås".encode("utf-8")
+        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
+            dammit = UnicodeDammit(utf8_data, [bad_encoding])
+            assert dammit.original_encoding.lower() == 'utf-8'
+
+    def test_exclude_encodings(self):
+        # This is UTF-8.
+        utf8_data = "Räksmörgås".encode("utf-8")
+
+        # But if we exclude UTF-8 from consideration, the guess is
+        # Windows-1252.
+        dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"])
+        assert dammit.original_encoding.lower() == 'windows-1252'
+
+        # And if we exclude that, there is no valid guess at all.
+        dammit = UnicodeDammit(
+            utf8_data, exclude_encodings=["utf-8", "windows-1252"])
+        assert dammit.original_encoding == None
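The detection behavior exercised above reproduces in a few standalone lines; the byte string here is illustrative only:

    from bs4.dammit import UnicodeDammit

    data = "Räksmörgås".encode("utf-8")
    assert UnicodeDammit(data).original_encoding.lower() == "utf-8"

    # Excluding the correct answer forces the next plausible guess.
    fallback = UnicodeDammit(data, exclude_encodings=["utf-8"])
    assert fallback.original_encoding.lower() == "windows-1252"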
+
+class TestEncodingDetector(object):
+
+    def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self):
+        detected = EncodingDetector(
+            b'<?xml version="1.0" encoding="UTF-\xdb" ?>')
+        encodings = list(detected.encodings)
+        assert 'utf-\N{REPLACEMENT CHARACTER}' in encodings
+
+    def test_detect_html5_style_meta_tag(self):
+
+        for data in (
+            b'<html><meta charset="euc-jp" /></html>',
+            b"<html><meta charset='euc-jp' /></html>",
+            b"<html><meta charset=euc-jp /></html>",
+            b"<html><meta charset=euc-jp/></html>"):
+            dammit = UnicodeDammit(data, is_html=True)
+            assert "euc-jp" == dammit.original_encoding
+
+    def test_last_ditch_entity_replacement(self):
+        # This is a UTF-8 document that contains bytestrings
+        # completely incompatible with UTF-8 (ie. encoded with some other
+        # encoding).
+        #
+        # Since there is no consistent encoding for the document,
+        # Unicode, Dammit will eventually encode the document as UTF-8
+        # and encode the incompatible characters as REPLACEMENT
+        # CHARACTER.
+        #
+        # If chardet is installed, it will detect that the document
+        # can be converted into ISO-8859-1 without errors. This happens
+        # to be the wrong encoding, but it is a consistent encoding, so the
+        # code we're testing here won't run.
+        #
+        # So we temporarily disable chardet if it's present.
+        doc = b"""\357\273\277<?xml version="1.0" encoding="utf-8"?>
+<html><b>\330\250\330\252\330\261</b>
+<i>\310\322\321\220\312\321\355\344</i></html>"""
+        chardet = bs4.dammit.chardet_dammit
+        logging.disable(logging.WARNING)
+        try:
+            def noop(str):
+                return None
+            bs4.dammit.chardet_dammit = noop
+            dammit = UnicodeDammit(doc)
+            assert True == dammit.contains_replacement_characters
+            assert "\ufffd" in dammit.unicode_markup
+
+            soup = BeautifulSoup(doc, "html.parser")
+            assert soup.contains_replacement_characters
+        finally:
+            logging.disable(logging.NOTSET)
+            bs4.dammit.chardet_dammit = chardet
+
+    def test_byte_order_mark_removed(self):
+        # A document written in UTF-16LE will have its byte order marker stripped.
+        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
+        dammit = UnicodeDammit(data)
+        assert "<a>áé</a>" == dammit.unicode_markup
+        assert "utf-16le" == dammit.original_encoding
+
+    def test_known_definite_versus_user_encodings(self):
+        # The known_definite_encodings are used before sniffing the
+        # byte-order mark; the user_encodings are used afterwards.
+
+        # Here's a document in UTF-16LE.
+        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
+        dammit = UnicodeDammit(data)
+
+        # We can process it as UTF-16 by passing it in as a known
+        # definite encoding.
+        before = UnicodeDammit(data, known_definite_encodings=["utf-16"])
+        assert "utf-16" == before.original_encoding
+
+        # If we pass UTF-8 as a user encoding, it's not even
+        # tried--the encoding sniffed from the byte-order mark takes
+        # precedence.
+        after = UnicodeDammit(data, user_encodings=["utf-8"])
+        assert "utf-16le" == after.original_encoding
+        assert ["utf-16le"] == [x[0] for x in after.tried_encodings]
+
+        # Here's a document in ISO-8859-8.
+        hebrew = b"\xed\xe5\xec\xf9"
+        dammit = UnicodeDammit(hebrew, known_definite_encodings=["utf-8"],
+                               user_encodings=["iso-8859-8"])
+
+        # The known_definite_encodings don't work, BOM sniffing does
+        # nothing (it only works for a few UTF encodings), but one of
+        # the user_encodings does work.
+        assert "iso-8859-8" == dammit.original_encoding
+        assert ["utf-8", "iso-8859-8"] == [x[0] for x in dammit.tried_encodings]
+
+    def test_deprecated_override_encodings(self):
+        # override_encodings is a deprecated alias for
+        # known_definite_encodings.
+        hebrew = b"\xed\xe5\xec\xf9"
+        dammit = UnicodeDammit(
+            hebrew,
+            known_definite_encodings=["shift-jis"],
+            override_encodings=["utf-8"],
+            user_encodings=["iso-8859-8"],
+        )
+        assert "iso-8859-8" == dammit.original_encoding
+
+        # known_definite_encodings and override_encodings were tried
+        # before user_encodings.
+        assert ["shift-jis", "utf-8", "iso-8859-8"] == (
+            [x[0] for x in dammit.tried_encodings]
+        )
+
+    def test_detwingle(self):
+        # Here's a UTF8 document.
+        utf8 = ("\N{SNOWMAN}" * 3).encode("utf8")
+
+        # Here's a Windows-1252 document.
+        windows_1252 = (
+            "\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
+            "\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")
+
+        # Through some unholy alchemy, they've been stuck together.
+        doc = utf8 + windows_1252 + utf8
+
+        # The document can't be turned into UTF-8:
+        with pytest.raises(UnicodeDecodeError):
+            doc.decode("utf8")
+
+        # Unicode, Dammit thinks the whole document is Windows-1252,
+        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"
+
+        # But if we run it through UnicodeDammit.detwingle, it's fixed:
+        fixed = UnicodeDammit.detwingle(doc)
+        assert "☃☃☃“Hi, I like Windows!”☃☃☃" == fixed.decode("utf8")
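For reference, a standalone sketch of the detwingle workflow tested above; the byte strings are illustrative:

    from bs4.dammit import UnicodeDammit

    utf8 = ("\N{SNOWMAN}" * 3).encode("utf8")
    win = ("\N{LEFT DOUBLE QUOTATION MARK}Hi"
           "\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows-1252")
    doc = utf8 + win + utf8   # mixed encodings in one byte string

    # detwingle() rewrites the Windows-1252 bytes as UTF-8, after which
    # the whole document decodes cleanly.
    fixed = UnicodeDammit.detwingle(doc)
    print(fixed.decode("utf8"))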
+
+    def test_detwingle_ignores_multibyte_characters(self):
+        # Each of these characters has a UTF-8 representation ending
+        # in \x93. \x93 is a smart quote if interpreted as
+        # Windows-1252. But our code knows to skip over multibyte
+        # UTF-8 characters, so they'll survive the process unscathed.
+        for tricky_unicode_char in (
+            "\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
+            "\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
+            "\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
+            ):
+            input = tricky_unicode_char.encode("utf8")
+            assert input.endswith(b'\x93')
+            output = UnicodeDammit.detwingle(input)
+            assert output == input
+
+    def test_find_declared_encoding(self):
+        # Test our ability to find a declared encoding inside an
+        # XML or HTML document.
+        #
+        # Even if the document comes in as Unicode, it may be
+        # interesting to know what encoding was claimed
+        # originally.
+
+        html_unicode = '<html><head><meta charset="utf-8"></head></html>'
+        html_bytes = html_unicode.encode("ascii")
+
+        xml_unicode = '<?xml version="1.0" encoding="ISO-8859-1" ?>'
+        xml_bytes = xml_unicode.encode("ascii")
+
+        m = EncodingDetector.find_declared_encoding
+        assert m(html_unicode, is_html=False) is None
+        assert "utf-8" == m(html_unicode, is_html=True)
+        assert "utf-8" == m(html_bytes, is_html=True)
+
+        assert "iso-8859-1" == m(xml_unicode)
+        assert "iso-8859-1" == m(xml_bytes)
+
+        # Normally, only the first few kilobytes of a document are checked for
+        # an encoding.
+        spacer = b' ' * 5000
+        assert m(spacer + html_bytes) is None
+        assert m(spacer + xml_bytes) is None
+
+        # But you can tell find_declared_encoding to search an entire
+        # HTML document.
+        assert (
+            m(spacer + html_bytes, is_html=True, search_entire_document=True)
+            == "utf-8"
+        )
+
+        # The XML encoding declaration has to be the very first thing
+        # in the document. We'll allow whitespace before the document
+        # starts, but nothing else.
+        assert m(xml_bytes, search_entire_document=True) == "iso-8859-1"
+        assert m(b' ' + xml_bytes, search_entire_document=True) == "iso-8859-1"
+        assert m(b'a' + xml_bytes, search_entire_document=True) is None
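A compact sketch of the classmethod under test; the markup strings are illustrative:

    from bs4.dammit import EncodingDetector

    html = b'<html><head><meta charset="utf-8"></head></html>'
    xml = b'<?xml version="1.0" encoding="ISO-8859-1" ?>'

    # <meta> declarations are only honored when is_html=True; XML
    # declarations are honored unconditionally.
    assert EncodingDetector.find_declared_encoding(html, is_html=True) == "utf-8"
    assert EncodingDetector.find_declared_encoding(xml) == "iso-8859-1"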
+ data = "fjords \u2294 penguins" + markup = "fjords ⊔ penguins" + assert self.sub.substitute_html(data) == markup + + data = "fjords \u2294\ufe00 penguins" + markup = "fjords ⊔︀ penguins" + assert self.sub.substitute_html(data) == markup + + def test_xml_converstion_includes_no_quotes_if_make_quoted_attribute_is_false(self): + s = 'Welcome to "my bar"' + assert self.sub.substitute_xml(s, False) == s + + def test_xml_attribute_quoting_normally_uses_double_quotes(self): + assert self.sub.substitute_xml("Welcome", True) == '"Welcome"' + assert self.sub.substitute_xml("Bob's Bar", True) == '"Bob\'s Bar"' + + def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self): + s = 'Welcome to "my bar"' + assert self.sub.substitute_xml(s, True) == "'Welcome to \"my bar\"'" + + def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self): + s = 'Welcome to "Bob\'s Bar"' + assert self.sub.substitute_xml(s, True) == '"Welcome to "Bob\'s Bar""' + + def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self): + quoted = 'Welcome to "Bob\'s Bar"' + assert self.sub.substitute_xml(quoted) == quoted + + def test_xml_quoting_handles_angle_brackets(self): + assert self.sub.substitute_xml("foo") == "foo<bar>" + + def test_xml_quoting_handles_ampersands(self): + assert self.sub.substitute_xml("AT&T") == "AT&T" + + def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self): + assert self.sub.substitute_xml("ÁT&T") == "&Aacute;T&T" + + def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self): + assert self.sub.substitute_xml_containing_entities("ÁT&T") == "ÁT&T" + + def test_quotes_not_html_substituted(self): + """There's no need to do this except inside attribute values.""" + text = 'Bob\'s "bar"' + assert self.sub.substitute_html(text) == text diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_docs.py b/sbsheriff/Lib/site-packages/bs4/tests/test_docs.py new file mode 100644 index 0000000..0194d69 --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4/tests/test_docs.py @@ -0,0 +1,38 @@ +"Test harness for doctests." + +# TODO: Pretty sure this isn't used and should be deleted. + +# pylint: disable-msg=E0611,W0142 + +__metaclass__ = type +__all__ = [ + 'additional_tests', + ] + +import atexit +import doctest +import os +#from pkg_resources import ( +# resource_filename, resource_exists, resource_listdir, cleanup_resources) +import unittest + +DOCTEST_FLAGS = ( + doctest.ELLIPSIS | + doctest.NORMALIZE_WHITESPACE | + doctest.REPORT_NDIFF) + + +# def additional_tests(): +# "Run the doc tests (README.txt and docs/*, if any exist)" +# doctest_files = [ +# os.path.abspath(resource_filename('bs4', 'README.txt'))] +# if resource_exists('bs4', 'docs'): +# for name in resource_listdir('bs4', 'docs'): +# if name.endswith('.txt'): +# doctest_files.append( +# os.path.abspath( +# resource_filename('bs4', 'docs/%s' % name))) +# kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS) +# atexit.register(cleanup_resources) +# return unittest.TestSuite(( +# doctest.DocFileSuite(*doctest_files, **kwargs))) diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_element.py b/sbsheriff/Lib/site-packages/bs4/tests/test_element.py new file mode 100644 index 0000000..6d08ab5 --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4/tests/test_element.py @@ -0,0 +1,74 @@ +"""Tests of classes in element.py. + +The really big classes -- Tag, PageElement, and NavigableString -- +are tested in separate files. 
+""" + +from bs4.element import ( + CharsetMetaAttributeValue, + ContentMetaAttributeValue, + NamespacedAttribute, +) +from . import SoupTest + + +class TestNamedspacedAttribute(object): + + def test_name_may_be_none_or_missing(self): + a = NamespacedAttribute("xmlns", None) + assert a == "xmlns" + + a = NamespacedAttribute("xmlns", "") + assert a == "xmlns" + + a = NamespacedAttribute("xmlns") + assert a == "xmlns" + + def test_namespace_may_be_none_or_missing(self): + a = NamespacedAttribute(None, "tag") + assert a == "tag" + + a = NamespacedAttribute("", "tag") + assert a == "tag" + + def test_attribute_is_equivalent_to_colon_separated_string(self): + a = NamespacedAttribute("a", "b") + assert "a:b" == a + + def test_attributes_are_equivalent_if_prefix_and_name_identical(self): + a = NamespacedAttribute("a", "b", "c") + b = NamespacedAttribute("a", "b", "c") + assert a == b + + # The actual namespace is not considered. + c = NamespacedAttribute("a", "b", None) + assert a == c + + # But name and prefix are important. + d = NamespacedAttribute("a", "z", "c") + assert a != d + + e = NamespacedAttribute("z", "b", "c") + assert a != e + + +class TestAttributeValueWithCharsetSubstitution(object): + """Certain attributes are designed to have the charset of the + final document substituted into their value. + """ + + def test_content_meta_attribute_value(self): + # The value of a CharsetMetaAttributeValue is whatever + # encoding the string is in. + value = CharsetMetaAttributeValue("euc-jp") + assert "euc-jp" == value + assert "euc-jp" == value.original_value + assert "utf8" == value.encode("utf8") + assert "ascii" == value.encode("ascii") + + def test_content_meta_attribute_value(self): + value = ContentMetaAttributeValue("text/html; charset=euc-jp") + assert "text/html; charset=euc-jp" == value + assert "text/html; charset=euc-jp" == value.original_value + assert "text/html; charset=utf8" == value.encode("utf8") + assert "text/html; charset=ascii" == value.encode("ascii") diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_formatter.py b/sbsheriff/Lib/site-packages/bs4/tests/test_formatter.py new file mode 100644 index 0000000..84d4e3b --- /dev/null +++ b/sbsheriff/Lib/site-packages/bs4/tests/test_formatter.py @@ -0,0 +1,113 @@ +import pytest + +from bs4.element import Tag +from bs4.formatter import ( + Formatter, + HTMLFormatter, + XMLFormatter, +) +from . import SoupTest + +class TestFormatter(SoupTest): + + def test_default_attributes(self): + # Test the default behavior of Formatter.attributes(). + formatter = Formatter() + tag = Tag(name="tag") + tag['b'] = 1 + tag['a'] = 2 + + # Attributes come out sorted by name. In Python 3, attributes + # normally come out of a dictionary in the order they were + # added. + assert [('a', 2), ('b', 1)] == formatter.attributes(tag) + + # This works even if Tag.attrs is None, though this shouldn't + # normally happen. + tag.attrs = None + assert [] == formatter.attributes(tag) + + assert ' ' == formatter.indent + + def test_sort_attributes(self): + # Test the ability to override Formatter.attributes() to, + # e.g., disable the normal sorting of attributes. + class UnsortedFormatter(Formatter): + def attributes(self, tag): + self.called_with = tag + for k, v in sorted(tag.attrs.items()): + if k == 'ignore': + continue + yield k,v + + soup = self.soup('

diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_formatter.py b/sbsheriff/Lib/site-packages/bs4/tests/test_formatter.py
new file mode 100644
index 0000000..84d4e3b
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/tests/test_formatter.py
@@ -0,0 +1,113 @@
+import pytest
+
+from bs4.element import Tag
+from bs4.formatter import (
+    Formatter,
+    HTMLFormatter,
+    XMLFormatter,
+)
+from . import SoupTest
+
+class TestFormatter(SoupTest):
+
+    def test_default_attributes(self):
+        # Test the default behavior of Formatter.attributes().
+        formatter = Formatter()
+        tag = Tag(name="tag")
+        tag['b'] = 1
+        tag['a'] = 2
+
+        # Attributes come out sorted by name. In Python 3, attributes
+        # normally come out of a dictionary in the order they were
+        # added.
+        assert [('a', 2), ('b', 1)] == formatter.attributes(tag)
+
+        # This works even if Tag.attrs is None, though this shouldn't
+        # normally happen.
+        tag.attrs = None
+        assert [] == formatter.attributes(tag)
+
+        assert ' ' == formatter.indent
+
+    def test_sort_attributes(self):
+        # Test the ability to override Formatter.attributes() to,
+        # e.g., disable the normal sorting of attributes.
+        class UnsortedFormatter(Formatter):
+            def attributes(self, tag):
+                self.called_with = tag
+                for k, v in sorted(tag.attrs.items()):
+                    if k == 'ignore':
+                        continue
+                    yield k, v
+
+        soup = self.soup('<p ignore="ignore" alpha="alpha" beta="beta">Some text</p>')
+        formatter = UnsortedFormatter()
+        decoded = soup.decode(formatter=formatter)
+
+        # attributes() was called on the <p> tag. It filtered out one
+        # attribute and sorted the other two.
+        assert formatter.called_with == soup.p
+        assert '<p alpha="alpha" beta="beta">Some text</p>' == decoded
+
+    def test_empty_attributes_are_booleans(self):
+        # Test the behavior of empty_attributes_are_booleans as well
+        # as which Formatters have it enabled.
+
+        for name in ('html', 'minimal', None):
+            formatter = HTMLFormatter.REGISTRY[name]
+            assert False == formatter.empty_attributes_are_booleans
+
+        formatter = XMLFormatter.REGISTRY[None]
+        assert False == formatter.empty_attributes_are_booleans
+
+        formatter = HTMLFormatter.REGISTRY['html5']
+        assert True == formatter.empty_attributes_are_booleans
+
+        # Verify that the constructor sets the value.
+        formatter = Formatter(empty_attributes_are_booleans=True)
+        assert True == formatter.empty_attributes_are_booleans
+
+        # Now demonstrate what it does to markup.
+        for markup in (
+            "<option selected></option>",
+            '<option selected=""></option>'
+        ):
+            soup = self.soup(markup)
+            for formatter in ('html', 'minimal', 'xml', None):
+                assert b'<option selected=""></option>' == soup.option.encode(formatter='html')
+                assert b'<option selected></option>' == soup.option.encode(formatter='html5')
+
+    @pytest.mark.parametrize(
+        "indent,expect",
+        [
+            (None, '<a>\n<b>\ntext\n</b>\n</a>\n'),
+            (-1, '<a>\n<b>\ntext\n</b>\n</a>\n'),
+            (0, '<a>\n<b>\ntext\n</b>\n</a>\n'),
+            ("", '<a>\n<b>\ntext\n</b>\n</a>\n'),
+
+            (1, '<a>\n <b>\n  text\n </b>\n</a>\n'),
+            (2, '<a>\n  <b>\n    text\n  </b>\n</a>\n'),
+
+            ("\t", '<a>\n\t<b>\n\t\ttext\n\t</b>\n</a>\n'),
+            ('abc', '<a>\nabc<b>\nabcabctext\nabc</b>\n</a>\n'),
+
+            # Some invalid inputs -- the default behavior is used.
+            (object(), '<a>\n <b>\n  text\n </b>\n</a>\n'),
+            (b'bytes', '<a>\n <b>\n  text\n </b>\n</a>\n'),
+        ]
+    )
+    def test_indent(self, indent, expect):
+        # Pretty-print a tree with a Formatter set to
+        # indent in a certain way and verify the results.
+        soup = self.soup("<a><b>text</b></a>")
+        formatter = Formatter(indent=indent)
+        assert soup.prettify(formatter=formatter) == expect
+
+        # Pretty-printing only happens with prettify(), not
+        # encode().
+        assert soup.encode(formatter=formatter) != expect
+
+    def test_default_indent_value(self):
+        formatter = Formatter()
+        assert formatter.indent == ' '
+
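A minimal usage sketch of the indent knob exercised above, assuming bs4 4.x with html.parser:

    from bs4 import BeautifulSoup
    from bs4.formatter import HTMLFormatter

    soup = BeautifulSoup("<a><b>text</b></a>", "html.parser")
    # indent is consulted by prettify() only; encode()/decode() ignore it.
    print(soup.prettify(formatter=HTMLFormatter(indent="\t")))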

diff --git a/sbsheriff/Lib/site-packages/bs4/tests/test_html5lib.py b/sbsheriff/Lib/site-packages/bs4/tests/test_html5lib.py
new file mode 100644
index 0000000..b32ab30
--- /dev/null
+++ b/sbsheriff/Lib/site-packages/bs4/tests/test_html5lib.py
@@ -0,0 +1,223 @@
+"""Tests to ensure that the html5lib tree builder generates good trees."""
+
+import warnings
+
+try:
+    from bs4.builder import HTML5TreeBuilder
+    HTML5LIB_PRESENT = True
+except ImportError as e:
+    HTML5LIB_PRESENT = False
+from bs4.element import SoupStrainer
+from . import (
+    HTML5TreeBuilderSmokeTest,
+    SoupTest,
+    skipIf,
+)
+
+@skipIf(
+    not HTML5LIB_PRESENT,
+    "html5lib seems not to be present, not testing its tree builder.")
+class TestHTML5LibBuilder(SoupTest, HTML5TreeBuilderSmokeTest):
+    """See ``HTML5TreeBuilderSmokeTest``."""
+
+    @property
+    def default_builder(self):
+        return HTML5TreeBuilder
+
+    def test_soupstrainer(self):
+        # The html5lib tree builder does not support SoupStrainers.
+        strainer = SoupStrainer("b")
+        markup = "<p>A <b>bold</b> statement.</p>"
+        with warnings.catch_warnings(record=True) as w:
+            soup = self.soup(markup, parse_only=strainer)
+        assert soup.decode() == self.document_for(markup)
+
+        assert "the html5lib tree builder doesn't support parse_only" in str(w[0].message)
+
+    def test_correctly_nested_tables(self):
+        """html5lib inserts <tbody> tags where other parsers don't."""
+        markup = ('<table id="1">'
+                  '<tr>'
+                  "<td>Here's another table:"
+                  '<table id="2">'
+                  '<tr><td>foo</td></tr>'
+                  '</table></td>')
+
+        self.assert_soup(
+            markup,
+            '<table id="1"><tbody><tr><td>Here\'s another table:'
+            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
+            '</td></tr></tbody></table>')
+
+        self.assert_soup(
+            "<table><thead><tr><td>Foo</td></tr></thead>"
+            "<tbody><tr><td>Bar</td></tr></tbody>"
+            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

+
+    def test_xml_declaration_followed_by_doctype(self):
+        markup = '''<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html>
+<html>
+  <head>
+  </head>
+  <body>
+   <p>foo</p>
+  </body>
+</html>'''
+        soup = self.soup(markup)
+        # Verify that we can reach the <p> tag; this means the tree is connected.
+        assert b"<p>foo</p>" == soup.p.encode()

+
+    def test_reparented_markup(self):
+        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
+        soup = self.soup(markup)
+        assert "<body><p><em>foo</em></p>\n<p><em>bar<a></a></em></p></body>" == soup.body.decode()
+        assert 2 == len(soup.find_all('p'))

+
+    def test_reparented_markup_ends_with_whitespace(self):
+        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
+        soup = self.soup(markup)
+        assert "<body><p><em>foo</em></p>\n<p><em>bar<a></a></em></p>\n</body>" == soup.body.decode()
+        assert 2 == len(soup.find_all('p'))
+
+    def test_reparented_markup_containing_identical_whitespace_nodes(self):
+        """Verify that we keep the two whitespace nodes in this
+        document distinct when reparenting the adjacent <tbody> tags.
+        """
+        markup = '<table> <tbody><tbody> </tbody></table>'
+        soup = self.soup(markup)
+        space1, space2 = soup.find_all(string=' ')
+        tbody1, tbody2 = soup.find_all('tbody')
+        assert space1.next_element is tbody1
+        assert tbody2.next_element is space2
+
+    def test_reparented_markup_containing_children(self):
+        markup = '<div><a>aftermath<p><noscript>target</noscript>aftermath</a></p></div>'
+        soup = self.soup(markup)
+        noscript = soup.noscript
+        assert "target" == noscript.next_element
+        target = soup.find(string='target')
+
+        # The 'aftermath' string was duplicated; we want the second one.
+        final_aftermath = soup.find_all(string='aftermath')[-1]
+
+        # The