can ZIM easily do the same Google service integration as
https://www.chenyuho.com/project/handwritingjs/
so cool if it works for ZIM because we have the drawing pen new Pen() functionality..
thanks
can ZIM easily do the same Google service integration as
https://www.chenyuho.com/project/handwritingjs/
so cool if it works for ZIM because we have the drawing pen new Pen() functionality..
thanks
found an example idea for ZIM
I tried with ZIM code but seems not recognizing the text
<html>
<head>
<title>ZIM Handwriting Recognition</title>
<script>
// Simple ML Neural Network for Handwriting Recognition
// Simple demo neural network for handwriting recognition.
//
// NOTE(review): the original kept ONE 784-weight vector shared by every
// output label, so the loop computed the identical score for all 36
// classes and always returned the first label ('0'). It also indexed
// weights[j] for every feature even though a 280x280 canvas yields
// 78,400 features vs 784 weights, producing NaN scores. Each class now
// has its own weight vector and out-of-range weights count as 0.
// The weights are still random — this is a demo, not a trained model.
class SimpleML {
  constructor() {
    this.outputLabels = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'.split('');
    // One independent weight vector (784 = 28x28 pixels) per label.
    this.weights = this.outputLabels.map(() =>
      Array(784).fill().map(() => Math.random() - 0.5)
    );
  }

  // imageData: an ImageData-like object with a flat RGBA .data array.
  // Returns { label, confidence } with confidence clamped to [0, 1].
  recognize(imageData) {
    // Keep only the red channel (every 4th byte) and normalize to 0-1.
    const features = Array.from(imageData.data)
      .filter((_, i) => i % 4 === 0)
      .map((x) => x / 255);
    // Forward pass: dot product of features with each class's weights.
    let maxScore = -Infinity;
    let predictedIndex = 0;
    for (let i = 0; i < this.outputLabels.length; i++) {
      const w = this.weights[i];
      // Treat missing weights as 0 so oversized inputs stay finite.
      const score = features.reduce((sum, x, j) => sum + x * (w[j] ?? 0), 0);
      if (score > maxScore) {
        maxScore = score;
        predictedIndex = i;
      }
    }
    return {
      label: this.outputLabels[predictedIndex],
      confidence: Math.min(Math.max(maxScore, 0), 1)
    };
  }
}
</script>
<style>
/* Page layout: centered single column on a light grey background. */
body {
margin: 0;
padding: 20px;
background: #f0f0f0;
font-family: Arial, sans-serif;
display: flex;
flex-direction: column;
align-items: center;
}
/* Drawing surface: white so black ink reads clearly for recognition. */
#myCanvas {
background: white;
border: 1px solid #ccc;
margin: 20px 0;
}
/* Row holding the Recognize / Clear buttons. */
.controls {
margin: 10px 0;
}
/* Green action buttons. */
button {
padding: 10px 20px;
margin: 0 5px;
font-size: 16px;
cursor: pointer;
background: #4CAF50;
color: white;
border: none;
border-radius: 4px;
}
/* Slightly darker green on hover. */
button:hover {
background: #45a049;
}
/* Box that shows the recognition result / prompt text. */
#result {
font-size: 18px;
margin: 10px 0;
padding: 10px;
min-width: 200px;
text-align: center;
border: 1px solid #ccc;
background: white;
}
</style>
</head>
<body>
<div id="canvasContainer">
<canvas id="myCanvas" width="280" height="280"></canvas>
</div>
<div class="controls">
<button id="recognizeBtn">Recognize</button>
<button id="clearBtn">Clear</button>
</div>
<div id="result">Draw a single character (letter or number)</div>
<script type=module>
import "https://zimjs.org/cdn/017/zim_pizzazz";

// The Frame adopts the existing canvas tag and calls ready() once set up.
new Frame({
    scaling: "myCanvas",
    width: 280,
    height: 280,
    color: "#fff",
    ready: ready,
    canvasID: "myCanvas"
});

// Wires up freehand drawing on the stage plus the Recognize/Clear buttons.
function ready(frame, stage, stageW, stageH) {
    // Shape that accumulates every stroke segment the user draws.
    const ink = new Shape();
    stage.addChild(ink);

    // Demo recognizer defined in the classic <script> block above.
    const recognizer = new SimpleML();

    let drawingNow = false;
    let prevX;
    let prevY;

    // Start a stroke: just remember where the pointer went down.
    stage.on("stagemousedown", (e) => {
        drawingNow = true;
        prevX = e.stageX;
        prevY = e.stageY;
    });

    // Extend the stroke with a thick round segment per pointer move.
    stage.on("stagemousemove", (e) => {
        if (!drawingNow) return;
        ink.graphics
            .setStrokeStyle(16, "round")
            .beginStroke("#000")
            .moveTo(prevX, prevY)
            .lineTo(e.stageX, e.stageY);
        prevX = e.stageX;
        prevY = e.stageY;
        stage.update();
    });

    stage.on("stagemouseup", () => {
        drawingNow = false;
    });

    // Recognize: read the raw canvas pixels and ask the demo model.
    document.getElementById("recognizeBtn").onclick = () => {
        const context = frame.canvas.getContext('2d');
        const imageData = context.getImageData(0, 0, frame.width, frame.height);
        const result = recognizer.recognize(imageData);
        document.getElementById("result").innerText =
            `Recognized: ${result.label} (Confidence: ${(result.confidence * 100).toFixed(1)}%)`;
    };

    // Clear: wipe the drawing and restore the prompt text.
    document.getElementById("clearBtn").onclick = () => {
        ink.graphics.clear();
        stage.update();
        document.getElementById("result").innerText = "Draw a single character (letter or number)";
    };
}
</script>
</body>
</html>
any idea why it is not working?
I've created a simple implementation that combines:
Key features:
Important notes:
thanks
when using new handwriting.recognize it isn't working
so can you add it to the library of ZIM?
<html>
<head>
<title>Advanced Handwriting Recognition</title>
<!-- raw.githubusercontent.com "/blob/" URLs are not loadable scripts
     (wrong path, and served as text/plain); load the file via a CDN. -->
<script
src="https://cdn.jsdelivr.net/gh/ChenYuHo/handwriting.js/handwriting.js"
></script>
<style>
/* Base page styling: light grey background, standard sans font. */
body {
margin: 0;
padding: 20px;
background: #f0f0f0;
font-family: Arial, sans-serif;
}
/* Centered wrapper limiting the content width. */
.container {
max-width: 800px;
margin: 0 auto;
text-align: center;
}
/* Drawing surface. */
#myCanvas {
background: white;
border: 1px solid #ccc;
margin: 20px 0;
}
/* Card holding the color picker, stroke slider, and buttons. */
.controls {
margin: 15px 0;
padding: 10px;
background: white;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
}
/* Green action buttons. */
button {
padding: 10px 20px;
margin: 5px;
font-size: 16px;
cursor: pointer;
background: #4CAF50;
color: white;
border: none;
border-radius: 4px;
}
button:hover {
background: #45a049;
}
/* Row of selectable ink colors. */
.color-picker {
margin: 10px 0;
}
/* Each color swatch is a clickable circle. */
.color-option {
width: 30px;
height: 30px;
margin: 0 5px;
border: 2px solid #ccc;
border-radius: 50%;
display: inline-block;
cursor: pointer;
}
/* Black ring marks the currently selected color. */
.color-option.active {
border-color: #000;
}
/* Box showing the recognition result / prompt. */
#result {
font-size: 18px;
margin: 10px 0;
padding: 15px;
border: 1px solid #ccc;
border-radius: 4px;
background: white;
}
/* Stroke-width slider row. */
.stroke-width {
margin: 10px 0;
}
input[type="range"] {
width: 200px;
margin: 0 10px;
}
</style>
</head>
<body>
<div class="container">
<h1>Advanced Handwriting Recognition</h1>
<canvas id="myCanvas" width="600" height="400"></canvas>
<div class="controls">
<div class="color-picker">
<div class="color-option active" style="background: #000000" data-color="#000000"></div>
<div class="color-option" style="background: #ff0000" data-color="#ff0000"></div>
<div class="color-option" style="background: #0000ff" data-color="#0000ff"></div>
<div class="color-option" style="background: #00ff00" data-color="#00ff00"></div>
</div>
<div class="stroke-width">
Stroke Width: <input type="range" id="strokeWidth" min="1" max="20" value="2">
<span id="strokeValue">2</span>px
</div>
<button id="recognizeBtn">Recognize</button>
<button id="clearBtn">Clear</button>
<button id="undoBtn">Undo</button>
</div>
<div id="result">Write something...</div>
</div>
<script type=module>
import "https://zimjs.org/cdn/017/zim_pizzazz"

// The Frame adopts the existing canvas tag and calls ready() once set up.
new Frame({
  scaling: "myCanvas",
  width: 600,
  height: 400,
  color: "#fff",
  outerColor: clear, // ZIM constant: transparent outer area
  ready: ready,
  canvasID: "myCanvas"
});

// Sets up drawing with color/width controls and sends the recorded
// strokes to handwriting.js (Google Input Tools) for recognition.
//
// NOTE(review): the original called `new handwriting.recognize()` and
// `recognizer.preprocess(data)` — neither exists in handwriting.js,
// which is why recognition never worked. The library exposes a plain
// function: handwriting.recognize(trace, options, callback), where
// trace is [[[x...],[y...]], ...] (one x-array/y-array pair per stroke)
// and the callback receives (results, err) with results as an array of
// candidate strings, best match first.
function ready(frame, stage, stageW, stageH) {
  let currentPath;
  let currentColor = "#000000";
  let currentStrokeWidth = 2;
  const paths = [];       // one ZIM Shape per stroke (for undo/clear)
  let isDrawing = false;
  let points = [];        // recorded strokes: each an array of [x, y]
  let currentStroke = [];

  // Color picker: clicking a swatch makes it the active ink color.
  document.querySelectorAll('.color-option').forEach(option => {
    option.addEventListener('click', (e) => {
      document.querySelector('.color-option.active').classList.remove('active');
      e.target.classList.add('active');
      currentColor = e.target.dataset.color;
    });
  });

  // Stroke width control: slider value drives future strokes.
  const strokeWidth = document.getElementById('strokeWidth');
  const strokeValue = document.getElementById('strokeValue');
  strokeWidth.addEventListener('input', (e) => {
    currentStrokeWidth = e.target.value;
    strokeValue.textContent = currentStrokeWidth;
  });

  // Drawing events: each pointer-down begins a new Shape and stroke.
  stage.on("stagemousedown", (e) => {
    isDrawing = true;
    currentPath = new Shape();
    stage.addChild(currentPath);
    currentStroke = [];
    currentStroke.push([e.stageX, e.stageY]);
    currentPath.graphics
      .setStrokeStyle(currentStrokeWidth, "round")
      .beginStroke(currentColor)
      .moveTo(e.stageX, e.stageY);
    paths.push(currentPath);
  });

  stage.on("stagemousemove", (e) => {
    if (!isDrawing) return;
    currentStroke.push([e.stageX, e.stageY]);
    currentPath.graphics.lineTo(e.stageX, e.stageY);
    stage.update();
  });

  // Pointer-up finishes the stroke; keep it only if it has points.
  stage.on("stagemouseup", () => {
    isDrawing = false;
    if (currentStroke.length > 0) {
      points.push(currentStroke);
    }
  });

  // Recognition button: convert recorded strokes to the handwriting.js
  // trace format and call the service.
  document.getElementById("recognizeBtn").onclick = () => {
    if (points.length === 0) {
      document.getElementById("result").innerText = "Please write something first!";
      return;
    }
    // One [xs, ys] pair per stroke, as handwriting.js expects.
    const trace = points.map(stroke => [
      stroke.map(p => p[0]),
      stroke.map(p => p[1])
    ]);
    const options = {
      width: stageW,          // writing area dimensions help accuracy
      height: stageH,
      language: "en",
      numOfReturn: 3          // ask for the top 3 candidates
    };
    handwriting.recognize(trace, options, (results, err) => {
      if (!err && results && results.length > 0) {
        // results are plain strings from the service; use innerText so
        // untrusted network data is never parsed as HTML.
        document.getElementById("result").innerText =
          `Recognized: ${results.join(', ')}`;
      } else {
        document.getElementById("result").innerText =
          "Recognition failed. Please try again.";
      }
    });
  };

  // Clear button: remove all stroke Shapes and recorded points.
  document.getElementById("clearBtn").onclick = () => {
    while (paths.length > 0) {
      stage.removeChild(paths.pop());
    }
    points = [];
    stage.update();
    document.getElementById("result").innerText = "Write something...";
  };

  // Undo button: drop the most recent stroke (Shape + its points).
  document.getElementById("undoBtn").onclick = () => {
    if (paths.length > 0) {
      stage.removeChild(paths.pop());
      points.pop();
      stage.update();
    }
  };
}
</script>
</body>
</html>
would be cool if it would work
thanks
Sorry - you will have to work all this stuff out yourself - unless anyone else here wants to take a look.
pity... Dan, you also introduced
new Speech() in ZIM... so it would be great to have
new Handwriting() in ZIM... because right now it isn't working.
I asked it to https://www.shecodes.io/athena#question-590260 but not working
@abstract this would be great also with speech .. to read a picture content
Once again... and for a final time... we are not bringing other people's libraries into ZIM. They are libraries so you can use them along with ZIM. You just have to figure out how to do it. And we are not interested in doing your work for you at this moment. Perhaps someone else might help. Sorry. That response looks like an AI response.
If you cache() a ZIM DisplayObject like the paper property of the Pen, then you can access the paper.cacheCanvas - this is a canvas that perhaps the library can access and give a result.
super.. that is what I was asking for..
I looked up in https://zimjs.com/code.php?view=69.93&title=Pen&line=80952
but did not find it
so paper.cacheCanvas
where can I find more info about that into the docs?
that's all.. a possibility to find the canvasbitmap.. that should be nice
you can send me that code to reach the image that is generated when the pen is used onto the canvas
thanks
No - figure it out yourself. Any object that you .cache() will have a cacheCanvas property - it is built in to CreateJS. You would find it as a CreateJS property of a Container - which most ZIM things extend.
I was looking at this code and the "magic" is actually this:
var data = JSON.stringify({
"options": "enable_pre_space",
"requests": [{
"writing_guide": {
"writing_area_width": options.width || this.width || undefined,
"writing_area_height": options.height || this.width || undefined
},
"ink": trace,
"language": options.language || "zh_TW"
}]
});
xhr.open("POST", "https://www.google.com.tw/inputtools/request?ime=handwriting&app=mobilesearch&cs=1&oe=UTF-8");
xhr.setRequestHeader("content-type", "application/json");
xhr.send(data);
You don't need the whole library. I am sure you can send the ZIM pen coordinates to Google.
Super @pettis waauw I'll check it out .. so it could work to write with the pen now and see the text appearing into a new TextInput() of ZIM?
did you make a codepen?
I did not. If I have free time maybe I can workout an example.
waauw yes would be wonderfull magical and super for kids to learn correct writing.. a big thanks already!
I also found this cool idea - converting text into your own handwriting font: "Coding my Handwriting" by Amy Goodchild
and this video