Compare commits

...

4 Commits

Author SHA1 Message Date
b90eea57f0 Added the downloader for images 2025-03-15 19:56:23 +01:00
6e2e3c05ad Added items.json from dataset 2025-03-15 19:50:34 +01:00
920b48776e Created script to import all data into database 2025-03-15 19:50:15 +01:00
2d5e568b62 Changed database structure 2025-03-15 19:50:01 +01:00
4 changed files with 26034 additions and 3 deletions

25915
items.json Normal file

File diff suppressed because it is too large. Load Diff

View File

@@ -29,13 +29,21 @@ CREATE TABLE items (
-- NOTE(review): this is a unified-diff hunk whose +/- markers were lost in
-- extraction, so removed and added lines are interleaved below.  Apparent
-- intent of the commit "Changed database structure": drop the direct
-- items.case column / FOREIGN KEY (the two lines below look like removals),
-- add items.price, add cases.price, and introduce the items_cases junction
-- table for a many-to-many relation -- TODO confirm against the repository.
'name' TEXT NOT NULL,
'rarity' INTEGER NOT NULL,
'image' TEXT NOT NULL,
'case' INTEGER NOT NULL,
FOREIGN KEY ('case') REFERENCES cases ('id')
'price' FLOAT NOT NULL
);
CREATE TABLE cases (
'id' INTEGER PRIMARY KEY NOT NULL ,
'uuid' TEXT UNIQUE NOT NULL,
'name' TEXT NOT NULL,
-- NOTE(review): the next two lines are the pre/post versions of the same
-- 'image' column (comma added, 'price' appended) -- old line likely removed.
'image' TEXT NOT NULL
'image' TEXT NOT NULL,
'price' FLOAT NOT NULL
);
-- Junction table: many-to-many link between items and cases.
CREATE TABLE items_cases (
'item' INTEGER NOT NULL,
'case' INTEGER NOT NULL,
FOREIGN KEY ('item') REFERENCES items ('id'),
FOREIGN KEY ('case') REFERENCES cases ('id'),
PRIMARY KEY ('item', 'case')
);

58
scripts/downloader.py Normal file
View File

@@ -0,0 +1,58 @@
"""Download weapon and case images referenced by items.json.

Collects every unique image URL from items.json and saves each image
(requested as .webp) under images/weapons/ or images/cases/.  HTTP 429
(rate limit) responses are retried after a short pause.
"""
from pathlib import Path
from time import sleep
import json

import requests


def _fetch(session: requests.Session, url: str) -> requests.Response:
    """GET *url* with its extension swapped to .webp, retrying on HTTP 429."""
    webp_url = url.replace(".png", ".webp")
    response = session.get(webp_url)
    while response.status_code == 429:  # rate limited: back off and retry
        sleep(0.2)
        response = session.get(webp_url)
    return response


def _download_all(session: requests.Session, urls: list[str], dest: Path) -> None:
    """Download every URL in *urls* into directory *dest*, reporting errors."""
    for url in urls:
        print(f"\rDownloading {url}")
        response = _fetch(session, url)
        if response.status_code == 200:
            # Keep the file name from the original (.png) URL.
            (dest / url.split("/")[-1]).write_bytes(response.content)
        else:
            print(f"Error {response.reason}, {response.status_code}")


weapons_dir = Path("images/weapons")
cases_dir = Path("images/cases")
# parents=True also creates images/; exist_ok=True makes reruns idempotent
# (the original os.mkdir crashed when the directories already existed).
weapons_dir.mkdir(parents=True, exist_ok=True)
cases_dir.mkdir(parents=True, exist_ok=True)

with open("items.json") as f:
    items = json.load(f)

# De-duplicate URLs so each image is fetched only once.
weapon_images = list({item["img_url"] for item in items})
case_images = list({case["img_url"] for item in items for case in item["cases"]})

# BUG FIX: the original set cookie_notice_accepted on each *response* object,
# which never affects later requests.  A Session sends the cookie with every
# request instead.
session = requests.Session()
session.cookies.set("cookie_notice_accepted", "true")

_download_all(session, weapon_images, weapons_dir)
_download_all(session, case_images, cases_dir)

50
scripts/import_data.py Normal file
View File

@@ -0,0 +1,50 @@
"""Import items.json into the SQLite database database.db.

Inserts every distinct case into ``cases``, every item into ``items``
(with its rarity name mapped to an integer code), and links items to
cases through the ``items_cases`` junction table.
"""
import json
import sqlite3
import uuid

# Maps the dataset's rarity names onto the integer codes stored in items.rarity.
RARITY_MAPPING = {
    "Consumer": 0,
    "Industrial": 1,
    "Mil-spec": 2,
    "Restricted": 3,
    "Classified": 4,
    "Covert": 5,
    "Contraband": 6,
}

with open("items.json", "r") as f:
    items = json.load(f)

# Collect the distinct case dicts (dicts are unhashable, so dedupe by scan).
cases = []
for item in items:
    for case in item["cases"]:
        if case not in cases:
            cases.append(case)

conn = sqlite3.connect("database.db")
try:
    for case in cases:
        image = "/images/cases/" + case["img_url"].split("/")[-1]
        row = conn.execute(
            "INSERT INTO cases ('uuid', 'name', 'image', 'price') VALUES (?, ?, ?, ?) RETURNING id",
            [str(uuid.uuid4()), case["name"], image, case["price"]],
        ).fetchone()
        case["id"] = row[0]  # remember the db id for the junction inserts below

    for item in items:
        # NOTE(review): the downloader saves weapon images under
        # images/weapons/, but this path says /images/items/ -- confirm which
        # directory the application actually serves.
        image = "/images/items/" + item["img_url"].split("/")[-1]
        item_row = conn.execute(
            "INSERT INTO items ('uuid', 'name', 'rarity', 'image', 'price') VALUES (?, ?, ?, ?, ?) RETURNING id",
            [str(uuid.uuid4()), item["name"], RARITY_MAPPING[item["rarity"]], image, item["price"]],
        ).fetchone()
        for case in item["cases"]:
            # BUG FIX: the original scan had no break, so a missed name either
            # raised NameError or silently reused the previous iteration's
            # match, producing wrong junction rows.  Fail loudly instead.
            matched = next(c for c in cases if c["name"] == case["name"])
            conn.execute(
                "INSERT INTO items_cases ('item', 'case') VALUES (?, ?)",
                [item_row[0], matched["id"]],
            )
    conn.commit()
finally:
    conn.close()