Merge remote-tracking branch 'aWeinzierl/fix-encoding'
commit f29c165357
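What the branch changes, in brief: bare open() calls inherit the platform's locale encoding, so sticker-pack JSON containing emoji can be mis-decoded on systems where that default is not UTF-8 (e.g. cp1252 on Windows); the fix binds encoding='UTF-8' once with functools.partial and routes file access through the resulting open_utf8 helper. A minimal sketch of the pattern follows; the path and data are hypothetical, used only for illustration.

    from functools import partial
    import json

    # Bind encoding='UTF-8' once so every call site opens text files the same
    # way instead of inheriting the locale's preferred encoding.
    open_utf8 = partial(open, encoding='UTF-8')

    meta_path = "pack.json"  # hypothetical path, for illustration only
    with open_utf8(meta_path, "w") as pack_file:
        json.dump({"title": "Example pack", "emoji": "😀"}, pack_file, ensure_ascii=False)
    with open_utf8(meta_path) as pack_file:
        print(json.load(pack_file)["emoji"])  # 😀, regardless of the system locale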
@@ -13,6 +13,7 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
+from functools import partial
 from io import BytesIO
 import os.path
 import json
@@ -21,6 +22,7 @@ from PIL import Image

 from . import matrix

+open_utf8 = partial(open, encoding='UTF-8')

 def convert_image(data: bytes) -> (bytes, int, int):
     image: Image.Image = Image.open(BytesIO(data)).convert("RGBA")
@@ -41,7 +43,7 @@ def convert_image(data: bytes) -> (bytes, int, int):
 def add_to_index(name: str, output_dir: str) -> None:
     index_path = os.path.join(output_dir, "index.json")
     try:
-        with open(index_path) as index_file:
+        with open_utf8(index_path) as index_file:
             index_data = json.load(index_file)
     except (FileNotFoundError, json.JSONDecodeError):
         index_data = {"packs": []}
@@ -49,7 +51,7 @@ def add_to_index(name: str, output_dir: str) -> None:
         index_data["homeserver_url"] = matrix.homeserver_url
     if name not in index_data["packs"]:
         index_data["packs"].append(name)
-        with open(index_path, "w") as index_file:
+        with open_utf8(index_path, "w") as index_file:
             json.dump(index_data, index_file, indent=" ")
         print(f"Added {name} to {index_path}")

@@ -93,7 +93,7 @@ async def main(args: argparse.Namespace) -> None:
     dirname = os.path.basename(os.path.abspath(args.path))
     meta_path = os.path.join(args.path, "pack.json")
     try:
-        with open(meta_path) as pack_file:
+        with util.open_utf8(meta_path) as pack_file:
            pack = json.load(pack_file)
        print(f"Loaded existing pack meta from {meta_path}")
    except FileNotFoundError:
@@ -112,14 +112,14 @@ async def main(args: argparse.Namespace) -> None:
         if sticker:
             pack["stickers"].append(sticker)

-    with open(meta_path, "w") as pack_file:
+    with util.open_utf8(meta_path, "w") as pack_file:
         json.dump(pack, pack_file)
     print(f"Wrote pack to {meta_path}")

     if args.add_to_index:
         picker_file_name = f"{pack['id']}.json"
         picker_pack_path = os.path.join(args.add_to_index, picker_file_name)
-        with open(picker_pack_path, "w") as pack_file:
+        with util.open_utf8(picker_pack_path, "w") as pack_file:
             json.dump(pack, pack_file)
         print(f"Copied pack to {picker_pack_path}")
         util.add_to_index(picker_file_name, args.add_to_index)
@@ -19,12 +19,12 @@ import json
 index_path = "../web/packs/index.json"

 try:
-    with open(index_path) as index_file:
+    with util.open_utf8(index_path) as index_file:
         index_data = json.load(index_file)
 except (FileNotFoundError, json.JSONDecodeError):
     index_data = {"packs": []}

-with open(sys.argv[-1]) as file:
+with util.open_utf8(sys.argv[-1]) as file:
     data = json.load(file)

 for pack in data["assets"]:
@@ -45,12 +45,12 @@ for pack in data["assets"]:
     }
     filename = f"scalar-{pack['name'].replace(' ', '_')}.json"
     pack_path = f"web/packs/{filename}"
-    with open(pack_path, "w") as pack_file:
+    with util.open_utf8(pack_path, "w") as pack_file:
         json.dump(pack_data, pack_file)
     print(f"Wrote {title} to {pack_path}")
     if filename not in index_data["packs"]:
         index_data["packs"].append(filename)

-with open(index_path, "w") as index_file:
+with util.open_utf8(index_path, "w") as index_file:
     json.dump(index_data, index_file, indent=" ")
 print(f"Updated {index_path}")
@@ -71,7 +71,7 @@ async def reupload_pack(client: TelegramClient, pack: StickerSetFull, output_dir

     already_uploaded = {}
     try:
-        with open(pack_path) as pack_file:
+        with util.open_utf8(pack_path) as pack_file:
             existing_pack = json.load(pack_file)
             already_uploaded = {int(sticker["net.maunium.telegram.sticker"]["id"]): sticker
                                 for sticker in existing_pack["stickers"]}
@@ -99,7 +99,7 @@ async def reupload_pack(client: TelegramClient, pack: StickerSetFull, output_dir
                doc["body"] = sticker.emoticon
            doc["net.maunium.telegram.sticker"]["emoticons"].append(sticker.emoticon)

-    with open(pack_path, "w") as pack_file:
+    with util.open_utf8(pack_path, "w") as pack_file:
         json.dump({
             "title": pack.set.title,
             "id": f"tg-{pack.set.id}",
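For context, the locale-dependent default that open_utf8 sidesteps can be inspected directly; a quick check, assuming CPython's standard text-mode behaviour:

    import locale

    # open() without an explicit encoding uses this value; when it is not UTF-8
    # (common on Windows), reading pack/index JSON that contains raw UTF-8 text,
    # such as emoji, can raise UnicodeDecodeError or produce mojibake.
    print(locale.getpreferredencoding(False))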