Speed up pathing, read resources
@@ -1,8 +1,19 @@
 from pyautogui import *
 import pyautogui
 import keyboard
+import win32api, win32con
 
-WAIT4CLICK = 0.04
+WAIT4CLICK = 0.001
 
+def click(x,y):
+    win32api.SetCursorPos((x,y))
+    win32api.mouse_event(win32con.MOUSEEVENTF_LEFTDOWN,0,0)
+    win32api.mouse_event(win32con.MOUSEEVENTF_LEFTUP,0,0)
+
+def right_click(x,y):
+    win32api.SetCursorPos((x,y))
+    win32api.mouse_event(win32con.MOUSEEVENTF_RIGHTDOWN,0,0)
+    win32api.mouse_event(win32con.MOUSEEVENTF_RIGHTUP,0,0)
+
 def order_peasants(number):
     print(f"Trying to order {number} peasants.")
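
Note (not part of the commit): the speed gain from the raw win32 helpers likely comes from the fact that every pyautogui call sleeps for pyautogui.PAUSE (0.1 s by default) and runs fail-safe checks, while win32api.mouse_event posts the input events directly; the explicit per-click delay also drops from 0.04 s to 1 ms. A minimal timing sketch, assuming Windows with pywin32 and pyautogui installed:

# Rough timing comparison, illustration only.
import time
import pyautogui
import win32api, win32con

def win32_click(x, y):
    win32api.SetCursorPos((x, y))
    win32api.mouse_event(win32con.MOUSEEVENTF_LEFTDOWN, 0, 0)
    win32api.mouse_event(win32con.MOUSEEVENTF_LEFTUP, 0, 0)

if __name__ == "__main__":
    start = time.time()
    for _ in range(20):
        win32_click(800, 500)
    print("win32api:", time.time() - start)   # typically a few milliseconds total

    start = time.time()
    for _ in range(20):
        pyautogui.click(800, 500)             # pays pyautogui.PAUSE (0.1 s) per call
    print("pyautogui:", time.time() - start)  # roughly 20 * 0.1 s = 2 s
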
@@ -18,7 +29,8 @@ def select_town_center():
 def follow_points(points):
     pyautogui.keyDown('shift')
     for x,y in points:
-        pyautogui.rightClick(x,y)
+        #pyautogui.rightClick(x,y)
+        right_click(x,y)
         time.sleep(WAIT4CLICK)
     pyautogui.keyUp('shift')
-    pyautogui.move(-500, -500) # reset mouse
+
@@ -125,6 +125,7 @@ def collect_screenshots():
     cnt = 0
     while True: #keyboard.is_pressed('q') == False:
         time.sleep(10)
+        read_resources()
         pic = pyautogui.screenshot()
         pic.save(f"image_{cnt}.png")
         cnt += 1
@@ -135,8 +136,8 @@ def find_town_center_in_map():
     # Map: 1546 886 - 1920 1080
     width, height = image.size
     is_white = lambda r,g,b: r == 255 and b == 255 and g == 255
-    for x in range(1546, width, 3):
-        for y in range(886, height, 3):
+    for x in range(1546, width, 1):
+        for y in range(886, height, 1):
             r,g,b = image.getpixel((x,y))
             if is_white(r,g,b):
                 top = find_top(image, x, y, is_white)
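
Changing the scan step from 3 to 1 checks every pixel of the minimap region (x 1546-1920, y 886-1080), so thin white markers can no longer fall between samples, at the cost of roughly 9x more getpixel calls. If that ever becomes a bottleneck, one hedged alternative is PIL's pixel-access object, which avoids the per-call overhead of getpixel; a sketch under that assumption (find_white_pixels is a hypothetical helper, `image` and `is_white` are as in the hunk above):

def find_white_pixels(image, is_white):
    px = image.load()                   # PIL pixel access object, indexed as px[x, y]
    width, height = image.size
    hits = []
    for x in range(1546, width, 1):
        for y in range(886, height, 1):
            r, g, b = px[x, y][:3]      # tolerate RGBA screenshots
            if is_white(r, g, b):
                hits.append((x, y))
    return hits
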
@@ -215,3 +216,27 @@ def initial_scout_trace():
     print(f"scout_trace: {scout_trace}")
     return scout_trace + center + inner_circle + mid_circle + outer_circle
 
+RESOURCES_LOCATIONS = {
+    #'villager_wood': (28,39,20,10),
+    'wood': (50,20,50,20),
+    #'villager_food': (128,39,20,10),
+    'food': (150,20,50,20),
+    #'villager_gold': (228,39,20,10),
+    'gold': (250,20,50,20),
+    #'villager_stone': (330,39,20,10),
+    'stone': (350,20,50,20)#,
+    #'villager_houses': (450, 20, 70, 20),
+    #'free_villagers': (543,32,11,11)
+}
+
+def read_resources():
+    res = {}
+    for topic in RESOURCES_LOCATIONS.keys():
+        img = pyautogui.screenshot(region=RESOURCES_LOCATIONS[topic])
+        txt = aoe_recognition_ocr.recognize_number(img)
+        print(topic, txt)
+        #img.save(f"{topic}.png")
+        if txt == "":
+            txt = 0
+        res[topic] = int(txt)
+    return res
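
For reference, pyautogui.screenshot(region=...) takes a (left, top, width, height) tuple, so each RESOURCES_LOCATIONS entry is a small crop of the resource bar in the top-left of the HUD; the exact offsets are specific to this 1920x1080 setup. A debug sketch, assuming this file is the aoe_recognition module that the other scripts import and that the game HUD is visible:

# Save each configured region to a PNG so the offsets can be checked by eye.
import pyautogui
from aoe_recognition import RESOURCES_LOCATIONS

for topic, region in RESOURCES_LOCATIONS.items():
    pyautogui.screenshot(region=region).save(f"debug_{topic}.png")
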
@@ -22,7 +22,39 @@ def recognize_text(img_pil):
     out_below = re.sub(r'[^a-zA-Z0-9_\s]', '', out_below)
     out_below = re.sub(r'[\s]', ' ', out_below).strip()
 
-    print("OCR OUTPUT:", out_below)
+    #print("OCR OUTPUT:", out_below)
     return out_below
 
 #recognize_text(cv2.imread('ocr_test.png'))
+
+import random
+def recognize_number(img_pil):
+    #img = cv2.imread('ocr_test.png')
+    img = np.asarray(img_pil)
+    bordersize = 5
+    row, col = img.shape[:2]
+    bottom = img[row-2:row, 0:col]
+    mean = cv2.mean(bottom)[0]
+    img = cv2.copyMakeBorder(
+        img,
+        top=bordersize,
+        bottom=bordersize,
+        left=bordersize,
+        right=bordersize,
+        borderType=cv2.BORDER_CONSTANT,
+        value=[mean, mean, mean]
+    )
+
+    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
+    gray, img_bin = cv2.threshold(gray,128,255,cv2.THRESH_BINARY | cv2.THRESH_OTSU)
+    gray = cv2.bitwise_not(img_bin)
+
+    tesseract_config = "-c tessedit_char_whitelist=0123456789/"
+    kernel = np.ones((2, 1), np.uint8)
+    img = cv2.erode(gray, kernel, iterations=1)
+    img = cv2.dilate(img, kernel, iterations=1)
+    #cv2.imwrite(f"OCR_{random.randint(0,9999)}.png", img)
+    out_below = pytesseract.image_to_string(img, config=tesseract_config)
+    out_below = out_below.strip()
+    #print("OCR OUTPUT:", out_below)
+    return out_below
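
The new recognize_number follows a common Tesseract preprocessing recipe: pad the crop with a border matching the background (mean of the bottom rows), binarize with Otsu, invert, then run a small erode/dilate pass with a 2x1 kernel before handing the image to Tesseract restricted to digits and '/'. A hedged usage sketch, assuming this file is the aoe_recognition_ocr module that read_resources calls, Tesseract is installed, and a crop such as wood.png exists (read_resources has a commented-out img.save(f"{topic}.png") for producing one):

from PIL import Image
import aoe_recognition_ocr

crop = Image.open("wood.png")
value = aoe_recognition_ocr.recognize_number(crop)
print("raw OCR:", repr(value))                  # e.g. '200', or '' if nothing was read
print("as int:", int(value) if value else 0)
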
@@ -41,11 +41,13 @@ def build_houses():
     pyautogui.press('q')
     time.sleep(WAIT4CLICK)
     pyautogui.click(650, 90)
+    aoe_commands.follow_points([(800, 500)])
     time.sleep(WAIT4CLICK)
 
     pyautogui.press('3')
     time.sleep(WAIT4CLICK)
     pyautogui.rightClick(650, 90)
+    aoe_commands.follow_points([(800, 500)])
     time.sleep(WAIT4CLICK)
 
     pyautogui.press('4')
@@ -56,14 +58,16 @@ def build_houses():
     pyautogui.press('q')
     time.sleep(WAIT4CLICK)
     pyautogui.click(300, 200)
+    aoe_commands.follow_points([(800, 500)])
     time.sleep(WAIT4CLICK)
 
 def run_start_routine():
-    aoe_commands.order_peasants(4)
+    aoe_commands.order_peasants(int(aoe_recognition.read_resources()['food'] / 50)) # each villager costs 50 food
     assign_hotkeys()
     build_houses()
     xxa = aoe_recognition.initial_scout_trace()
     pyautogui.press('1')
     pyautogui.press('1')
     aoe_commands.follow_points(xxa)
+    pyautogui.move(-500, -500) # reset mouse
     pyautogui.press('h')
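
order_peasants now derives its count from the OCR'd food stockpile (villagers cost 50 food), so a misread can queue far too many villagers or none at all. One option is a small defensive wrapper; this is a sketch only, not part of the commit, and queue_villagers is a hypothetical helper name:

# Hypothetical guard around the OCR-driven villager count.
import aoe_commands
import aoe_recognition

VILLAGER_COST = 50      # food per villager
MAX_QUEUE = 10          # sanity cap in case the OCR misreads the stockpile

def queue_villagers():
    food = aoe_recognition.read_resources().get('food', 0)
    count = min(food // VILLAGER_COST, MAX_QUEUE)
    if count > 0:
        aoe_commands.order_peasants(count)
    return count
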