ThongCoder commited on
Commit
709c473
·
verified ·
1 Parent(s): 10a24ed

Upload 16 files

Browse files
Files changed (16) hide show
  1. .gitattributes +71 -35
  2. .gitignore +6 -0
  3. Dockerfile +40 -0
  4. README.md +11 -11
  5. api.py +93 -0
  6. app.py +44 -0
  7. cloudflare.py +87 -0
  8. db/persistence.py +192 -0
  9. dump.py +37 -0
  10. misc.py +73 -0
  11. models.py +77 -0
  12. public/live-pairs.html +30 -0
  13. public/pair.txt +0 -0
  14. requirements.txt +3 -0
  15. run.py +32 -0
  16. scan.py +202 -0
.gitattributes CHANGED
@@ -1,35 +1,71 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ *.sqlite filter=lfs diff=lfs merge=lfs -text
+ *.7z filter=lfs diff=lfs merge=lfs -text
37
+ *.arrow filter=lfs diff=lfs merge=lfs -text
38
+ *.bin filter=lfs diff=lfs merge=lfs -text
39
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
40
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
41
+ *.ftz filter=lfs diff=lfs merge=lfs -text
42
+ *.gz filter=lfs diff=lfs merge=lfs -text
43
+ *.h5 filter=lfs diff=lfs merge=lfs -text
44
+ *.joblib filter=lfs diff=lfs merge=lfs -text
45
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
46
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
47
+ *.model filter=lfs diff=lfs merge=lfs -text
48
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
49
+ *.npy filter=lfs diff=lfs merge=lfs -text
50
+ *.npz filter=lfs diff=lfs merge=lfs -text
51
+ *.onnx filter=lfs diff=lfs merge=lfs -text
52
+ *.ot filter=lfs diff=lfs merge=lfs -text
53
+ *.parquet filter=lfs diff=lfs merge=lfs -text
54
+ *.pb filter=lfs diff=lfs merge=lfs -text
55
+ *.pickle filter=lfs diff=lfs merge=lfs -text
56
+ *.pkl filter=lfs diff=lfs merge=lfs -text
57
+ *.pt filter=lfs diff=lfs merge=lfs -text
58
+ *.pth filter=lfs diff=lfs merge=lfs -text
59
+ *.rar filter=lfs diff=lfs merge=lfs -text
60
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
61
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
62
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
63
+ *.tar filter=lfs diff=lfs merge=lfs -text
64
+ *.tflite filter=lfs diff=lfs merge=lfs -text
65
+ *.tgz filter=lfs diff=lfs merge=lfs -text
66
+ *.wasm filter=lfs diff=lfs merge=lfs -text
67
+ *.xz filter=lfs diff=lfs merge=lfs -text
68
+ *.zip filter=lfs diff=lfs merge=lfs -text
69
+ *.zst filter=lfs diff=lfs merge=lfs -text
70
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
71
+ *.sqlite filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ .venv/
2
+ __pycache__/
3
+ cache.sqlite
4
+ db/cache.sqlite
5
+ .cloudflare-headers-cache.json
6
+ .vscode/settings.json
Dockerfile ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.10
2
+
3
+ # Set environment to non-interactive (avoids tzdata prompts)
4
+ ENV DEBIAN_FRONTEND=noninteractive
5
+
6
+ # Update packages and install dependencies for Rust
7
+ RUN apt-get update && apt-get install -y --no-install-recommends \
8
+ curl \
9
+ build-essential \
10
+ pkg-config \
11
+ libffi-dev \
12
+ libssl-dev \
13
+ ca-certificates \
14
+ git \
15
+ && rm -rf /var/lib/apt/lists/*
16
+
17
+ # Install Rust (via official rustup script)
18
+ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y
19
+
20
+ # Add Rust to PATH
21
+ ENV PATH="/root/.cargo/bin:$PATH"
22
+
23
+ # Set working directory
24
+ WORKDIR /app
25
+
26
+ # Copy your files
27
+ COPY . /app
28
+ ENV TMPDIR=/app/tmp
29
+ RUN mkdir -p /app/tmp && chmod -R 777 /app/tmp
30
+ RUN mkdir -p /app/.cache && chmod -R 777 /app/.cache
31
+ RUN chmod -R 777 /app
32
+
33
+ # Confirm versions
34
+ RUN rustc --version && cargo --version
35
+
36
+ # Optional: install Python deps
37
+ RUN python3 -m pip install -r requirements.txt
38
+
39
+ EXPOSE 7860
40
+ CMD [ "python", "app.py" ]
README.md CHANGED
@@ -1,11 +1,11 @@
1
- ---
2
- title: Infinite Craft Searcher
3
- emoji: 📊
4
- colorFrom: indigo
5
- colorTo: pink
6
- sdk: docker
7
- pinned: false
8
- license: mit
9
- ---
10
-
11
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
+ ---
2
+ title: Infinite Craft Searcher
3
+ emoji: 🌖
4
+ colorFrom: blue
5
+ colorTo: red
6
+ sdk: docker
7
+ pinned: false
8
+ license: mit
9
+ ---
10
+
11
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
api.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import time
2
+
3
+ from curl_cffi import requests
4
+
5
+ from models import Element, Pair, PendingPair
6
+
7
+
8
def raw_make_pair(
    first: str,
    second: str,
    headers: dict[str, str],
    *,
    timeout: float = 30,
) -> tuple[str, str | None, bool | None]:
    """Combine two elements via the neal.fun Infinite Craft API.

    Args:
        first: Name of the first element.
        second: Name of the second element.
        headers: Browser-like HTTP headers (see cloudflare.get_headers).
        timeout: Request timeout in seconds.

    Returns:
        (result name, emoji or None, "isNew" discovery flag or None).

    Raises:
        ValueError: If the response JSON has no "result" key.
        requests.RequestsError: On HTTP or network failure.
    """
    response = requests.Session(impersonate='chrome').get(
        "https://neal.fun/api/infinite-craft/pair",
        params={"first": first, "second": second},
        headers=headers,
        # BUG FIX: curl_cffi timeouts are in SECONDS, not milliseconds;
        # multiplying by 1000 effectively disabled the timeout entirely.
        timeout=timeout,
    )
    response.raise_for_status()
    data = response.json()

    if "result" not in data:
        msg = f"Invalid response: {data!r}"
        raise ValueError(msg)

    return data["result"], data.get("emoji"), data.get("isNew")
29
+
30
+
31
def make_pair(
    pair: PendingPair,
    headers: dict[str, str],
    *,
    timeout: float = 30,
) -> Pair:
    """Resolve *pair* against the API and return the completed Pair."""
    name, emoji, is_new = raw_make_pair(
        pair.first.name, pair.second.name, headers, timeout=timeout
    )
    result_element = Element(name, emoji)
    return Pair(pair.first, pair.second, result_element, is_new)
49
+
50
+
51
def make_pair_exp_backoff(
    pair: PendingPair,
    headers: dict[str, str],
    *,
    timeout: float = 30,
) -> Pair:
    """Like make_pair, but retry with exponential backoff until *timeout*.

    Retries on any error except an HTTP 500 response (the API's signal for
    an impossible pair), sleeping 1, 2, 4, ... up to 60 seconds between
    attempts.

    Raises:
        TimeoutError: When the overall time budget is exhausted.
    """
    started_at = time.perf_counter()
    backoff = 1
    while True:
        exc = None
        try:
            eta = timeout - (time.perf_counter() - started_at)
            return make_pair(pair, headers, timeout=eta)
        except requests.RequestsError as e:
            # BUG FIX: e.args[0] is not guaranteed to be a string; guard with
            # isinstance so the check itself cannot raise AttributeError and
            # mask the original error.
            if e.args and isinstance(e.args[0], str) and e.args[0].startswith("HTTP Error 500:"):
                raise  # don't bother retrying
            exc = e
        except Exception as e:
            exc = e

        eta = timeout - (time.perf_counter() - started_at)
        if eta < backoff:
            msg = f"Ran out of time while making the pair: {pair}"
            raise TimeoutError(msg) from exc

        time.sleep(backoff)
        backoff = min(backoff * 2, 60)
78
+
79
+
80
if __name__ == "__main__":
    # Manual smoke test: prompt for two elements and combine them once.
    import cloudflare

    headers = cloudflare.get_headers()

    first = Element(input("First Element: "))
    second = Element(input("Second Element: "))
    pair = make_pair(PendingPair(first, second), headers)

    print(pair)
app.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import threading
3
+ import time
4
+ from fastapi import FastAPI
5
+ from fastapi.responses import FileResponse, HTMLResponse, PlainTextResponse, StreamingResponse
6
+ from fastapi.staticfiles import StaticFiles
7
+ import uvicorn
8
+
9
+ from run import start_finder
10
+
11
+
12
app = FastAPI()
# Expose the raw database files (e.g. db/cache.sqlite) for download under /file/db.
app.mount('/file/db', StaticFiles(directory='db/', html=False), '/file/db')
14
def tail_file(path):
    """Yield an SSE-formatted event for every new line appended to *path*.

    Starts at the current end of file and polls once per second, so the
    generator never terminates on its own.
    """
    with open(path, "r", encoding="utf-8") as fp:
        fp.seek(0, os.SEEK_END)  # start at EOF
        while True:
            text = fp.readline()
            if not text:
                time.sleep(1)
                continue
            yield f"data: {text.strip()}\n\n"
23
+
24
@app.get("/raw-pair")
def send_raw_pair():
    """Return the entire pair log as plain text."""
    with open('public/pair.txt') as handle:
        body = handle.read()
    return PlainTextResponse(body)
28
+
29
@app.get("/live-pair")
def live_pairs():
    """Serve the live-pairs HTML page."""
    with open('public/live-pairs.html') as handle:
        markup = handle.read()
    return HTMLResponse(markup)
33
+
34
@app.get("/stream-pair")
def stream_pairs():
    """Stream new pair-log lines to the browser as server-sent events."""
    events = tail_file('public/pair.txt')
    return StreamingResponse(events, media_type="text/event-stream")
37
+
38
@app.on_event('startup')
def start():
    """Launch the background pair-finding loop when the server starts."""
    threading.Thread(target=start_finder, daemon=True).start()
42
+
43
# Run the API server directly (the Dockerfile CMD uses this entry point).
if __name__ == "__main__":
    uvicorn.run(app, host='0.0.0.0', port=7860)
cloudflare.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import json
3
+ import re
4
+ from pathlib import Path
5
+ import time
6
+
7
+ import api
8
+ import misc
9
+
10
+
11
def parse_curl_string(curl_string: str) -> dict[str, str]:
    """Extract the ``-H 'name: value'`` headers from a copied cURL command."""
    return {
        match.group(1): match.group(2)
        for line in curl_string.strip().splitlines()
        if (match := re.search(r"-H\s*'(.*?)\s*:\s*(.*)'", line))
    }
23
+
24
+
25
def prompt_for_headers() -> dict[str, str]:
    """Produce request headers, retrying until a usable set is parsed."""
    while True:
        print(
            "Follow these directions to bypass CloudFlare anti-bot restrictions.\n"
            " 1. Go to https://neal.fun/infinite-craft/\n"
            " 2. Open the developer tools, and select the Network tab\n"
            " 3. Make any pair\n"
            " 4. In the Network tab, find the network request that was made (search 'pair')\n"
            " 5. Right click on it -> Copy -> Copy as cURL (bash)\n"
            " 6. Come back to the terminal, and press 'Enter'.",
        )
        # The "clipboard" step is simulated by a generated cURL command.
        curl_string = misc.header_bank_and_impersonate()
        looks_right = curl_string.startswith(
            "curl 'https://neal.fun/api/infinite-craft/pair?first="
        )
        if looks_right:
            headers = parse_curl_string(curl_string)
            if headers:
                return headers

        print(
            "Hmm... it doesn't look like your clipboard contains the right data. Try again.\n\n",
        )
45
+
46
+
47
def verify_headers(headers: dict[str, str]) -> Exception | None:
    """Probe the API with a known pair; return the failure, or None if OK."""
    try:
        api.raw_make_pair("Fire", "Water", headers)
    except Exception as error:
        return error
    return None
54
+
55
+
56
def get_headers(verify: bool = True) -> dict[str, str]:
    """Load working request headers, preferring the on-disk cache.

    Cached headers are re-verified against the live API when *verify* is
    true; a stale cache is deleted and fresh headers are generated (and
    re-checked) until a working set is found, which is then cached.
    """
    filename = Path(__file__).parent / ".cloudflare-headers-cache.json"
    try:
        with filename.open() as f:
            headers = json.load(f)
    except Exception:
        # No cache (or unreadable cache) -- fall through and regenerate.
        pass
    else:
        if verify and verify_headers(headers) is None:
            return headers
        with contextlib.suppress(Exception):
            filename.unlink()

    # BUG FIX: retry with a loop instead of unbounded recursion, so repeated
    # failures cannot grow the call stack indefinitely.
    while True:
        headers = prompt_for_headers()
        if not verify:
            break
        error = verify_headers(headers)
        if error is None:
            break
        print(f"Those headers are not valid! Error: {error!r}")
        print("Try again.\n")
        time.sleep(90)

    with contextlib.suppress(Exception), filename.open("w") as f:
        json.dump(headers, f, indent=2)

    return headers
82
+
83
+
84
# Manual check: obtain (and cache) working headers, then print them.
if __name__ == "__main__":
    headers = get_headers()
    print("Your (working) headers:")
    print(json.dumps(headers, indent=2))
db/persistence.py ADDED
@@ -0,0 +1,192 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import random
2
+ import sqlite3
3
+ from typing import Generator, Literal
4
+
5
+ from models import Element, Pair, PendingPair
6
+
7
+
8
def connect() -> sqlite3.Connection:
    """Open a connection to the on-disk cache database."""
    return sqlite3.connect("db/cache.sqlite")
10
+
11
+
12
# Create the schema at import time so every caller sees the tables.
with connect() as conn:
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS element (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            first_created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            name TEXT UNIQUE,
            emoji TEXT
        )
        """,
    )

    # BUG FIX: a comma was missing between the last FOREIGN KEY clause and
    # the UNIQUE table constraint, which makes the CREATE TABLE statement a
    # SQLite syntax error (table constraints must be comma-separated).
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS pair (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
            first_element_id INTEGER,
            second_element_id INTEGER,
            result_element_id INTEGER,
            is_discovery INTEGER,
            FOREIGN KEY (first_element_id) REFERENCES element (id),
            FOREIGN KEY (second_element_id) REFERENCES element (id),
            FOREIGN KEY (result_element_id) REFERENCES element (id),
            UNIQUE(first_element_id, second_element_id)
        )
        """,
    )
40
+
41
+
42
+ def _upsert_element(conn: sqlite3.Connection, element: Element) -> None:
43
+ conn.execute(
44
+ """
45
+ INSERT INTO element (name, emoji)
46
+ VALUES (?, ?)
47
+ ON CONFLICT(name) DO UPDATE SET
48
+ emoji = excluded.emoji
49
+ """,
50
+ (element.name, element.emoji),
51
+ )
52
+
53
+ (element.database_id,) = conn.execute(
54
+ "SELECT id FROM element WHERE name = ?",
55
+ (element.name,),
56
+ ).fetchone()
57
+
58
+
59
def _upsert_pair(conn: sqlite3.Connection, pair: "Pair") -> None:
    """Persist *pair*, inserting any of its elements that lack database ids."""
    # Make sure every element row exists and has its id populated.
    for element in pair.elements:
        if element.database_id is None:
            _upsert_element(conn, element)

    element_ids = tuple(e.database_id for e in pair.elements)
    conn.execute(
        """
        INSERT INTO pair (first_element_id, second_element_id, result_element_id, is_discovery)
        VALUES (?, ?, ?, ?)
        ON CONFLICT(first_element_id, second_element_id) DO UPDATE SET
            result_element_id = excluded.result_element_id,
            is_discovery = MAX(is_discovery, excluded.is_discovery)
        """,
        (*element_ids, 1 if pair.is_discovery else 0),
    )
78
+
79
+
80
def record_pair(pair: "Pair") -> None:
    """Write *pair* to the database inside a single transaction."""
    with connect() as db:
        _upsert_pair(db, pair)
83
+
84
+
85
# The four ways the pending-pair query may be ordered; the value is
# interpolated directly into the ORDER BY clause in _select_pending_pairs,
# so it is restricted to these literals.
PendingPairOrder = Literal[
    "first.id ASC, second.id ASC",
    "first.id ASC, second.id DESC",
    "first.id DESC, second.id ASC",
    "first.id DESC, second.id DESC"
]
# Orders rotated through by scan.py when a search direction goes stale
# (the fourth literal above is intentionally not in the rotation).
PENDING_PAIR_ORDERS: list[PendingPairOrder] = [
    "first.id DESC, second.id ASC",
    "first.id ASC, second.id ASC",
    "first.id ASC, second.id DESC",
]
96
+
97
+
98
def _select_pending_pairs(
    conn: sqlite3.Connection,
    order: "PendingPairOrder" = PENDING_PAIR_ORDERS[0],
) -> "Generator[PendingPair, None, None]":
    """Yield every element combination that has not been attempted yet.

    The f-string interpolation is safe: *order* is constrained to the
    PendingPairOrder literals, never user input.
    """
    rows = conn.execute(
        f"""
        SELECT
            first.id,
            first.name,
            first.emoji,
            second.id,
            second.name,
            second.emoji
        FROM element AS first
        LEFT JOIN element AS second ON first.name <= second.name
        LEFT JOIN pair ON pair.first_element_id = first.id AND pair.second_element_id = second.id
        WHERE pair.id IS NULL
        ORDER BY {order}
        """,
    )

    for f_id, f_name, f_emoji, s_id, s_name, s_emoji in rows:
        yield PendingPair(
            Element(f_name, f_emoji, f_id),
            Element(s_name, s_emoji, s_id),
        )
126
+
127
+
128
def select_pending_pairs(order: "PendingPairOrder") -> "Generator[PendingPair, None, None]":
    """Public wrapper around _select_pending_pairs with its own connection."""
    with connect() as db:
        yield from _select_pending_pairs(db, order)
131
+
132
+
133
+ def _element_count(conn: sqlite3.Connection) -> int:
134
+ (count,) = conn.execute("SELECT COUNT(*) FROM element").fetchone()
135
+ return count
136
+
137
+
138
+ def _pair_count(conn: sqlite3.Connection) -> int:
139
+ (count,) = conn.execute("SELECT COUNT(*) FROM pair").fetchone()
140
+ return count
141
+
142
+
143
def counts() -> tuple[int, int]:
    """Return (element count, pair count) using a fresh connection."""
    with connect() as db:
        return _element_count(db), _pair_count(db)
146
+
147
+
148
def _select_elements_and_discovered(
    conn: sqlite3.Connection,
) -> "Generator[tuple[Element, bool], None, None]":
    """Yield every element together with a flag marking first discoveries."""
    rows = conn.execute(
        """
        SELECT
            e.name,
            e.emoji,
            e.id,
            MAX(p.result_element_id IS NOT NULL) AS is_discovery
        FROM element e
        LEFT JOIN pair p
            ON p.result_element_id = e.id
            AND p.is_discovery = TRUE
        GROUP BY e.name, e.emoji, e.id
        ORDER BY e.id ASC
        """,
    )

    for name, emoji, element_id, is_discovery in rows:
        yield Element(name, emoji, element_id), is_discovery
171
+
172
+
173
def select_elements_and_discovered() -> "Generator[tuple[Element, bool], None, None]":
    """Public wrapper that supplies its own connection.

    BUG FIX: previously this *returned* the inner generator, so rows were
    actually fetched only after the ``with`` block had exited its
    transaction scope; ``yield from`` keeps the connection context active
    for the whole iteration, matching select_pending_pairs.
    """
    with connect() as conn:
        yield from _select_elements_and_discovered(conn)
176
+
177
+
178
# Seed the database with the four primary elements at import time.
with connect() as conn:
    primary_elements = [
        Element("Fire", "\N{FIRE}"),
        Element("Earth", "\N{EARTH GLOBE EUROPE-AFRICA}"),
        Element("Water", "\N{DROPLET}"),
        Element("Wind", "\N{WIND BLOWING FACE}\N{VARIATION SELECTOR-16}"),
    ]

    # The search order is "mostly deterministic" on the macroscopic scale
    # so randomize the order of the primary elements so that everyone who runs
    # this code gets one of 4! (factorial) possible "macroscopic routes"
    random.shuffle(primary_elements)

    for e in primary_elements:
        _upsert_element(conn, e)
dump.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ from textwrap import dedent
3
+
4
+ import db.persistence as persistence
5
+
6
+
7
def dump() -> None:
    """Print a JS snippet that imports our elements into the game's localStorage."""
    rows = persistence.select_elements_and_discovered()
    data = [(element.emoji, element.name, discovered) for element, discovered in rows]

    script = dedent(
        f"""
        let data = {json.dumps(data)};
        let storage = JSON.parse(localStorage.getItem("infinite-craft-data")) || {{}};
        storage.elements = storage.elements || [];

        const nameSet = new Set(storage.elements.map(element => element.text));

        data.forEach(element => {{
            let [emoji, name, discovered] = element;

            if (!nameSet.has(name)) {{
                storage.elements.push({{ text: name, emoji: emoji, discovered: discovered }});
            }}
        }});

        localStorage.setItem("infinite-craft-data", JSON.stringify(storage));
        """,
    ).strip()
    print(script)
34
+
35
+
36
# Print the localStorage import script when run directly.
if __name__ == "__main__":
    dump()
misc.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import random
3
+ from curl_cffi import requests
4
+
5
def header_bank_and_impersonate(browser='chrome'):
    """Build a fake 'Copy as cURL' command with randomized, browser-like headers.

    Stands in for the manual clipboard step: the returned string is later fed
    through cloudflare.parse_curl_string to obtain a header dict.
    """
    accept_languages = [
        "vi,en-US;q=0.9,en;q=0.8",
        "en-US,en;q=0.9",
        "en-GB,en;q=0.8",
        "fr-FR,fr;q=0.9,en;q=0.8",
        "zh-CN,zh;q=0.9,en;q=0.8",
        "ja,en;q=0.9",
    ]
    lang = random.choice(accept_languages)
    is_mobile = False  # Change to True for mobile headers
    if is_mobile:
        platform = '"Android"'
    else:
        platform = random.choice(['"Windows"', '"macOS"', '"Linux"', '"Chrome OS"'])
    brand = 'Google Chrome' if browser == 'chrome' else 'Microsoft Edge'
    secchua = f'"Not)A;Brand";v="8", "Chromium";v="138", "{brand}";v="138"'
    if is_mobile:
        user_agent = (
            "Mozilla/5.0 (Linux; Android 13; SM-G991B) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/138.0.0.0 Mobile Safari/537.36"
        )
    else:
        user_agent = (
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36"
        )

    # Example query
    first, second = "Aquarium", "Avalanche"
    mobile_flag = "?1" if is_mobile else "?0"

    curl = rf"""
    curl 'https://neal.fun/api/infinite-craft/pair?first={first}&second={second}' \
      -H 'accept: */*' \
      -H 'accept-language: {lang}' \
      -H 'referer: https://neal.fun/infinite-craft/' \
      -H 'referrer-policy: strict-origin-when-cross-origin' \
      -H 'sec-ch-ua: {secchua}' \
      -H 'sec-ch-ua-mobile: {mobile_flag}' \
      -H 'sec-ch-ua-platform: {platform}' \
      -H 'sec-fetch-dest: empty' \
      -H 'sec-fetch-mode: cors' \
      -H 'sec-fetch-site: same-origin' \
      -H 'user-agent: {user_agent}' \
      --compressed
    """
    return curl.strip()
46
+
47
def log_pair(pair):
    """Append one line describing *pair* to the public pair log."""
    with open('public/pair.txt', 'at+') as log:
        log.write(pair + '\n')
50
+
51
def trigger_push():
    """Fire the GitHub Actions workflow that pushes the dataset upstream."""
    token = os.getenv('GITHUB_PAT')
    repo = "ThongAccount/hf-dataset-pusher"
    workflow = "update.yml"

    url = f"https://api.github.com/repos/{repo}/actions/workflows/{workflow}/dispatches"
    headers = {
        "Accept": "application/vnd.github+json",
        "Authorization": f"Bearer {token}",
        "User-Agent": "hf-dataset-pusher",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    payload = {
        "ref": "main",
        "inputs": {
            "base_url": "https://thongcoder-infinite-craft-searcher.hf.space"
        },
    }

    resp = requests.post(url, headers=headers, json=payload)
    if resp.ok:
        print("✅ Triggered GitHub Action successfully.")
    else:
        print(f"❌ Failed to trigger Action: {resp.status_code} → {resp.text}")
models.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+
3
+
4
+ class Element:
5
+ def __init__(
6
+ self,
7
+ name: str,
8
+ emoji: str | None = None,
9
+ database_id: int | None = None,
10
+ ) -> None:
11
+ self.name = name
12
+ self.emoji = emoji or "\N{BLACK QUESTION MARK ORNAMENT}"
13
+ self.database_id = database_id
14
+
15
+ def __hash__(self) -> int:
16
+ return hash(self.name)
17
+
18
+ def __eq__(self, other: "Element") -> bool:
19
+ return self.name == other.name
20
+
21
+ def __str__(self) -> str:
22
+ return f"{self.emoji} {self.name}"
23
+
24
+ def __repr__(self) -> str:
25
+ return repr(str(self))
26
+
27
+ @property
28
+ def numeric(self) -> bool:
29
+ return re.search(r"\d", self.name) is not None
30
+
31
+
32
class PendingPair:
    """An unordered element combination awaiting an API result.

    The two elements are stored sorted by name so that (A, B) and (B, A)
    hash and compare as the same pair.
    """

    def __init__(self, first: "Element", second: "Element") -> None:
        self.first, self.second = (
            (first, second) if first.name < second.name else (second, first)
        )

    def __hash__(self) -> int:
        return hash((self.first, self.second))

    def __eq__(self, other: object) -> bool:
        # BUG FIX: guard against non-pair operands instead of raising
        # AttributeError on other.first; NotImplemented lets Python fall
        # back to its default comparison.
        if not isinstance(other, PendingPair):
            return NotImplemented
        return self.first == other.first and self.second == other.second

    def __str__(self) -> str:
        return f"{self.first} + {self.second}"

    def __repr__(self) -> str:
        return f"{self.first!r} + {self.second!r}"

    @property
    def numeric(self) -> bool:
        """True when either side of the pair has a digit in its name."""
        return self.first.numeric or self.second.numeric
53
+
54
+
55
class Pair(PendingPair):
    """A completed combination: two inputs, a result, and a discovery flag."""

    def __init__(
        self,
        first: "Element",
        second: "Element",
        result: "Element",
        is_discovery: bool | None = None,
    ) -> None:
        super().__init__(first, second)
        self.result = result
        # Normalize None/falsy flags to a plain bool.
        self.is_discovery = is_discovery is True

    def __str__(self) -> str:
        suffix = " (New Discovery!)" if self.is_discovery else ""
        return f"{super().__str__()} = {self.result}{suffix}"

    def __repr__(self) -> str:
        suffix = " (New Discovery!)" if self.is_discovery else ""
        return f"{super().__repr__()} = {self.result!r}{suffix}"

    @property
    def elements(self) -> tuple["Element", "Element", "Element"]:
        """The pair's two inputs followed by its result."""
        return self.first, self.second, self.result
public/live-pairs.html ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!-- public/live.html -->
2
+ <!DOCTYPE html>
3
+ <html>
4
+ <head>
5
+ <meta charset="utf-8">
6
+ <title>Live Pairing Stream</title>
7
+ <style>
8
+ body { font-family: sans-serif; padding: 1em; }
9
+ #log { white-space: pre-wrap; font-family: monospace; }
10
+ </style>
11
+ </head>
12
+ <body>
13
+ <h1>🔁 Infinite Craft: Live Pairing</h1>
14
+ <div id="log">Waiting for stream...</div>
15
+
16
+ <script>
17
+ const logDiv = document.getElementById("log");
18
+ const es = new EventSource("/stream-pair");
19
+
20
+ es.onmessage = (event) => {
21
+ logDiv.textContent = event.data + "\n" + logDiv.textContent;
22
+ };
23
+
24
+ es.onerror = () => {
25
+ logDiv.textContent = "❌ Connection lost. Refresh to retry.\n" + logDiv.textContent;
26
+ es.close();
27
+ };
28
+ </script>
29
+ </body>
30
+ </html>
public/pair.txt ADDED
File without changes
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ curl_cffi
2
+ fastapi
3
+ uvicorn
run.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import concurrent.futures
import os
import time
import traceback

import curl_cffi.requests as requests

from scan import scan
5
+
6
+
7
def start_finder():
    """Download the latest cache database, then run the scan loop forever.

    The download is best-effort: on failure we fall through and let the
    scanner start from whatever local database already exists.
    """
    try:
        url = 'https://huggingface.co/datasets/ThongCoder/infinite-craft/resolve/main/cache.sqlite'
        filename = 'db/cache.sqlite'
        root_dir = os.path.dirname(os.path.abspath(__file__))  # points to /app
        dest_path = os.path.join(root_dir, filename)

        print(f"Downloading from: {url}")
        response = requests.get(url, stream=True)
        response.raise_for_status()

        with open(dest_path, "wb") as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)

        print(f"Saved to: {dest_path}")
        time.sleep(.5)
    except Exception as e:
        print(f'Error hit: {e}')

    while True:
        try:
            scan(False, .2, 64)
        except concurrent.futures.TimeoutError:
            # The scanner timed out waiting on futures; just restart it.
            # (Public alias of the private concurrent.futures._base.TimeoutError.)
            continue
        except Exception:
            # BUG FIX: the old code printed `e.with_traceback` -- the bound
            # method object, not a traceback. Print the actual traceback.
            print(f'Unexpected error:\n{traceback.format_exc()}')
            break
30
+
31
# Run the downloader + scan loop directly (app.py starts it in a thread).
if __name__ == "__main__":
    start_finder()
scan.py ADDED
@@ -0,0 +1,202 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import time
2
+ from concurrent.futures import Future, ThreadPoolExecutor, as_completed
3
+ from typing import Generator, TypeAlias
4
+
5
+ import api
6
+ import cloudflare
7
+ from misc import log_pair, trigger_push
8
+ import db.persistence as persistence
9
+ from models import Pair, PendingPair
10
+
11
# Type aliases used throughout the scanner:
#   Failed  - pairs that errored this cycle and are skipped until retried.
#   Futures - in-flight API calls mapped back to the pair they resolve.
#   Headers - the CloudFlare-bypassing HTTP headers.
Failed: TypeAlias = set[PendingPair]
Futures: TypeAlias = dict[Future[Pair], PendingPair]
Headers: TypeAlias = dict[str, str]
14
+
15
+
16
def valid_pending_pairs(
    allow_numbers: bool,
    *,
    failed: Failed,
    futures: Futures,
    order: persistence.PendingPairOrder,
) -> Generator[PendingPair, None, None]:
    """Yield attempt-worthy pending pairs, skipping failed/in-flight ones."""
    in_flight = futures.values()  # live view; reflects later mutations
    for candidate in persistence.select_pending_pairs(order):
        if not allow_numbers and candidate.numeric:
            continue
        if candidate in failed:
            continue
        if candidate in in_flight:
            continue
        yield candidate
34
+
35
+
36
def queue_pair(
    executor: ThreadPoolExecutor,
    pending_pair: PendingPair,
    futures: Futures,
    *,
    headers: Headers,
) -> None:
    """Submit *pending_pair* to the executor and track its future."""
    future = executor.submit(
        api.make_pair_exp_backoff,
        pending_pair,
        headers,
        timeout=5,
    )
    futures[future] = pending_pair
51
+
52
+
53
def push_one_future(
    executor: ThreadPoolExecutor,
    futures: Futures,
    *,
    allow_numbers: bool,
    failed: Failed,
    headers: Headers,
    order: persistence.PendingPairOrder,
) -> bool:
    """Queue the next valid pending pair; return False when none remain."""
    candidates = valid_pending_pairs(
        allow_numbers,
        failed=failed,
        futures=futures,
        order=order,
    )
    pending_pair = next(candidates, None)
    if pending_pair is None:
        return False
    queue_pair(executor, pending_pair, futures, headers=headers)
    return True
71
+
72
+
73
def handle_completed_futures(
    futures: Futures,
    *,
    failed: Failed,
    timeout: float,
) -> Generator[Pair | None, None, None]:
    """Drain completed futures, persisting and logging each resolved pair.

    Yields the Pair for successes, or None for API/database failures (which
    are added to *failed*). Raises concurrent.futures.TimeoutError when
    *timeout* elapses while futures are still pending.
    """
    n_elements, n_pairs = persistence.counts()
    log_line = f"Pairs: {n_pairs:,d} Elements: {n_elements:,d}"
    last_n_elements = n_elements
    for future in as_completed(futures, timeout=timeout):
        pending_pair = futures.pop(future)
        try:
            pair = future.result()
        except TimeoutError:
            print(f"[API TIMED OUT] {pending_pair}".ljust(len(log_line)))
            print(log_line, end="\r")
            failed.add(pending_pair)
            yield None
            continue
        except Exception as e:
            print(f"[API FAILED - {e!r}] {pending_pair}".ljust(len(log_line)))
            print(log_line, end="\r")
            failed.add(pending_pair)
            yield None
            continue

        try:
            persistence.record_pair(pair)
        except Exception as e:
            print(f"[DATABASE FAILED - {e!r}] {pair}".ljust(len(log_line)))
            print(log_line, end="\r")
            failed.add(pending_pair)
            yield None
            continue

        yield pair

        n_elements, n_pairs = persistence.counts()
        log_line = f"Pairs: {n_pairs:,d} Elements: {n_elements:,d}"

        print(f"Pair #{n_pairs}: {str(pair)}")
        log_pair(f"Pair #{n_pairs}: {str(pair)}")
        if n_elements != last_n_elements:
            res_name = pair.result.name
            res_emoji = pair.result.emoji
            res_id = pair.result.database_id
            print(f'New element: {res_emoji} {res_name} (ID {res_id})')
            log_pair(f'New element: {res_emoji} {res_name} (ID {res_id})')
        # BUG FIX: last_n_elements was never refreshed, so once any element
        # had been added, EVERY later pair logged "New element". Track the
        # count seen at the end of each iteration instead.
        last_n_elements = n_elements
        if n_pairs % 10000 == 0:
            print(f'Reached {n_pairs} pairs. Sending to DB.')
            trigger_push()
            time.sleep(90)
125
+
126
def now() -> float:
    """Monotonic timestamp in seconds (perf_counter)."""
    return time.perf_counter()
128
+
129
+
130
def scan(allow_numbers: bool, seconds_per_request: float, threads: int) -> None:
    """Continuously attempt pending pairs with a thread pool.

    allow_numbers: also attempt pairs whose names contain digits.
    seconds_per_request: pacing between scheduling rounds.
    threads: worker count (clamped to >= 1); up to 2x this many requests
        are kept in flight at once.

    Returns only when every possible pair has been attempted.
    """
    threads = max(threads, 1)

    headers: Headers = cloudflare.get_headers()
    failed: Failed = set()
    futures: Futures = {}

    orders = persistence.PENDING_PAIR_ORDERS.copy()

    with ThreadPoolExecutor(threads) as executor:

        def shutdown() -> None:
            # Stop accepting work, cancel what's queued, then wait for the
            # calls that are already running to finish.
            executor.shutdown(False, cancel_futures=True)
            incomplete_futures = [f for f in futures if not f.done()]
            if not incomplete_futures:
                return

            n = len(incomplete_futures)

            before = time.perf_counter()
            print(f"[SHUTTING DOWN] 0/{n} threads terminated...", end="\r")
            for i, _ in enumerate(as_completed(incomplete_futures), 1):
                print(f"[SHUTTING DOWN] {i}/{n} threads terminated...", end="\r")
            duration = 1000 * (time.perf_counter() - before)
            print(f"[SHUTDOWN] {n} thread(s) completed in {duration:.2f} milliseconds.")

        while True:
            # Keep up to 2x *threads* requests in flight.
            if len(futures) < threads * 2:
                pushed = push_one_future(
                    executor,
                    futures,
                    allow_numbers=allow_numbers,
                    failed=failed,
                    headers=headers,
                    order=orders[0],
                )

                if not pushed:
                    if failed:
                        # Only previously-failed pairs remain: retry them.
                        failed.clear()
                        continue

                    if not futures:
                        print("Completed! All possible pairs have been made!")
                        return

            next_future_at = now() + seconds_per_request
            try:
                for pair in handle_completed_futures(
                    futures,
                    failed=failed,
                    timeout=next_future_at - now(),
                ):
                    # A failure or a dead end ("Nothing") rotates the search
                    # order so the scan explores a different direction.
                    if not pair or pair.result.name.lower() == "nothing":
                        orders.insert(0, orders.pop())
            except TimeoutError:
                pass
            except Exception as e:
                pass

            delay_remaining = next_future_at - now()
            if delay_remaining < 0:
                continue

            try:
                time.sleep(delay_remaining)
            except:
                # KeyboardInterrupt/SystemExit: drain workers, then re-raise.
                shutdown()
                raise
199
+
200
+
201
# Manual entry point: conservative settings (no numeric names, 8 threads).
if __name__ == "__main__":
    scan(False, 0.25, 8)