From c918172088a15cb542edd3c915385b4a11b7b7f7 Mon Sep 17 00:00:00 2001
From: Jaewook Lee
Date: Thu, 23 Jan 2025 20:20:07 +0900
Subject: [PATCH] initial commit

---
 README.md       |  7 +++++++
 app.py          | 13 +++++++++++++
 ocr.applescript | 34 ++++++++++++++++++++++++++++++++++
 3 files changed, 54 insertions(+)
 create mode 100644 README.md
 create mode 100644 app.py
 create mode 100644 ocr.applescript

diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b2497fc
--- /dev/null
+++ b/README.md
@@ -0,0 +1,7 @@
+# Run
+```bash
+python app.py
+```
+
+# Open
+[http://localhost:8501](http://localhost:8501)
\ No newline at end of file
diff --git a/app.py b/app.py
new file mode 100644
index 0000000..1d66692
--- /dev/null
+++ b/app.py
@@ -0,0 +1,13 @@
+# http://localhost:8501
+try: import os, io, tempfile, streamlit
+except: os.system('pip install -q streamlit')
+finally: import streamlit as st
+if st.runtime.exists():
+    st.title('맥모닝 OCR')
+    if img := st.file_uploader("Choose a file"):
+        st.image(io.BytesIO(blob := img.getvalue()))
+        with tempfile.TemporaryDirectory() as tmp, \
+             open(path := f'{tmp}/{img.name}', 'wb') as f:
+            f.write(blob)
+            st.code(os.popen(f'osascript ocr.applescript {path}').read())
+else: os.system('streamlit run app.py --browser.gatherUsageStats false')
\ No newline at end of file
diff --git a/ocr.applescript b/ocr.applescript
new file mode 100644
index 0000000..f715ea8
--- /dev/null
+++ b/ocr.applescript
@@ -0,0 +1,34 @@
+-- Credit: https://stackoverflow.com/a/75779406
+
+use framework "Vision"
+
+on run(argv)
+	set ap to current application
+
+	-- Read image content
+	set img to ap's NSImage's alloc()'s initWithContentsOfFile:item 1 of argv
+
+	-- Set up request handler using image's raw data
+	set handle to ap's VNImageRequestHandler's alloc()'s ¬
+		initWithData:(img's TIFFRepresentation()) ¬
+		options:(ap's NSDictionary's dictionary())
+
+	-- Initialize text request
+	set request to ap's VNRecognizeTextRequest's alloc()'s init()
+
+	-- Set recognition level to accurate
+	request's setRecognitionLevel:(ap's VNRequestTextRecognitionLevelAccurate)
+
+	-- Set recognition language to Korean
+	request's setRecognitionLanguages:(ap's NSArray's arrayWithObject:"ko-KR")
+
+	-- Perform the request and get the results
+	handle's performRequests:{request} |error|:(missing value)
+
+	-- Obtain and return the string values of the results
+	set res to ""
+	repeat with ocr in request's results()
+		set res to res & ((first item in (ocr's topCandidates:1))'s |string|()) & linefeed
+	end repeat
+	return res
+end run