From d436a1cb8aa26f1c8b6ada827cdaaeabd07e685b Mon Sep 17 00:00:00 2001
From: zhadko <130551128+Antonio20012@users.noreply.github.com>
Date: Sat, 4 May 2024 11:23:39 +0300
Subject: [PATCH 01/11] Update C1_W2_Assignment.js

---
 .../W2/assignment/C1_W2_Assignment.js | 77 +++++++++++++++----
 1 file changed, 62 insertions(+), 15 deletions(-)

diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
index 8c7d1557..ac194919 100755
--- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
+++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
@@ -1,4 +1,5 @@
 import {FMnistData} from './fashion-data.js';
+
 var canvas, ctx, saveButton, clearButton;
 var pos = {x:0, y:0};
 var rawImage;
@@ -15,12 +16,50 @@ function getModel() {
     // HINT: Take a look at the MNIST example.
     model = tf.sequential();
 
-    // YOUR CODE HERE
-
-
-    // Compile the model using the categoricalCrossentropy loss,
+    // Add the first convolutional layer
+    model.add(tf.layers.conv2d({
+        inputShape: [28, 28, 1],
+        kernelSize: 3,
+        filters: 32,
+        activation: 'relu',
+        kernelInitializer: 'varianceScaling'
+    }));
+
+    // Add a max pooling layer
+    model.add(tf.layers.maxPooling2d({poolSize: [2, 2]}));
+
+    // Add another convolutional layer
+    model.add(tf.layers.conv2d({
+        kernelSize: 3,
+        filters: 64,
+        activation: 'relu'
+    }));
+
+    // Add a max pooling layer
+    model.add(tf.layers.maxPooling2d({poolSize: [2, 2]}));
+
+    // Add a flatten layer
+    model.add(tf.layers.flatten());
+
+    // Add a dense layer
+    model.add(tf.layers.dense({
+        units: 128,
+        activation: 'relu'
+    }));
+
+    // Add the output layer
+    model.add(tf.layers.dense({
+        units: 10,
+        activation: 'softmax'
+    }));
+
+    // Compile the model using categoricalCrossentropy loss,
     // the tf.train.adam() optimizer, and `acc` for your metrics.
-    model.compile(// YOUR CODE HERE);
+    model.compile({
+        optimizer: tf.train.adam(),
+        loss: 'categoricalCrossentropy',
+        metrics: ['accuracy']
+    });
 
     return model;
 }
@@ -28,17 +67,18 @@ function getModel() {
 async function train(model, data) {
 
     // Set the following metrics for the callback: 'loss', 'val_loss', 'acc', 'val_acc'.
-    const metrics = // YOUR CODE HERE
+    const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];
 
     // Create the container for the callback. Set the name to 'Model Training' and
     // use a height of 1000px for the styles.
-    const container = // YOUR CODE HERE
+    const container = document.getElementById('main');
+    container.style = 'height: 1000px;';
 
     // Use tfvis.show.fitCallbacks() to setup the callbacks.
    // Use the container and metrics defined above as the parameters.
-    const fitCallbacks = // YOUR CODE HERE
+    const fitCallbacks = tfvis.show.fitCallbacks(container, metrics);
 
     const BATCH_SIZE = 512;
     const TRAIN_DATA_SIZE = 6000;
@@ -47,14 +87,24 @@ async function train(model, data) {
     // Get the training batches and resize them. Remember to put your code
     // inside a tf.tidy() clause to clean up all the intermediate tensors.
     // HINT: Take a look at the MNIST example.
-    const [trainXs, trainYs] = // YOUR CODE HERE
-
+    const [trainXs, trainYs] = tf.tidy(() => {
+        const d = data.nextTrainBatch(TRAIN_DATA_SIZE);
+        return [
+            d.xs.reshape([TRAIN_DATA_SIZE, 28, 28, 1]),
+            d.labels
+        ];
+    });
 
     // Get the testing batches and resize them. Remember to put your code
     // inside a tf.tidy() clause to clean up all the intermediate tensors.
     // HINT: Take a look at the MNIST example.
-    const [testXs, testYs] = // YOUR CODE HERE
-
+    const [testXs, testYs] = tf.tidy(() => {
+        const d = data.nextTestBatch(TEST_DATA_SIZE);
+        return [
+            d.xs.reshape([TEST_DATA_SIZE, 28, 28, 1]),
+            d.labels
+        ];
+    });
 
     return model.fit(trainXs, trainYs, {
         batchSize: BATCH_SIZE,
         validationData: [testXs, testYs],
@@ -132,6 +182,3 @@ async function run() {
 }
 
 document.addEventListener('DOMContentLoaded', run);
-
-
-

From 74cc5f6e713fe83e32afd2fa13ca91acceae8dc0 Mon Sep 17 00:00:00 2001
From: zhadko <130551128+Antonio20012@users.noreply.github.com>
Date: Sat, 4 May 2024 11:27:45 +0300
Subject: [PATCH 02/11] Update C1_W2_Assignment.js

---
 .../W2/assignment/C1_W2_Assignment.js | 80 ++++++++++---------
 1 file changed, 44 insertions(+), 36 deletions(-)

diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
index ac194919..79a92d74 100755
--- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
+++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
@@ -1,12 +1,12 @@
-import {FMnistData} from './fashion-data.js';
+import { FMnistData } from './fashion-data.js';
 
 var canvas, ctx, saveButton, clearButton;
-var pos = {x:0, y:0};
+var pos = { x: 0, y: 0 };
 var rawImage;
 var model;
 
 function getModel() {
-    
+
     // In the space below create a convolutional neural network that can classify the
     // images of articles of clothing in the Fashion MNIST dataset. Your convolutional
     // neural network should only use the following layers: conv2d, maxPooling2d,
@@ -15,7 +15,7 @@ function getModel() {
     // many layers, filters, and neurons as you like.
     // HINT: Take a look at the MNIST example.
     model = tf.sequential();
-    
+
     // Add the first convolutional layer
     model.add(tf.layers.conv2d({
         inputShape: [28, 28, 1],
@@ -26,7 +26,7 @@ function getModel() {
     }));
 
     // Add a max pooling layer
-    model.add(tf.layers.maxPooling2d({poolSize: [2, 2]}));
+    model.add(tf.layers.maxPooling2d({ poolSize: [2, 2] }));
 
     // Add another convolutional layer
     model.add(tf.layers.conv2d({
@@ -36,7 +36,7 @@ function getModel() {
     }));
 
     // Add a max pooling layer
-    model.add(tf.layers.maxPooling2d({poolSize: [2, 2]}));
+    model.add(tf.layers.maxPooling2d({ poolSize: [2, 2] }));
 
     // Add a flatten layer
     model.add(tf.layers.flatten());
@@ -60,30 +60,29 @@ function getModel() {
         loss: 'categoricalCrossentropy',
         metrics: ['accuracy']
     });
-    
+
     return model;
 }
 
 async function train(model, data) {
-    
+
     // Set the following metrics for the callback: 'loss', 'val_loss', 'acc', 'val_acc'.
-    const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];  
-
+    const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];
     // Create the container for the callback. Set the name to 'Model Training' and
     // use a height of 1000px for the styles.
     const container = document.getElementById('main');
     container.style = 'height: 1000px;';
-    
-    
+
+
     // Use tfvis.show.fitCallbacks() to setup the callbacks.
     // Use the container and metrics defined above as the parameters.
     const fitCallbacks = tfvis.show.fitCallbacks(container, metrics);
-    
+
     const BATCH_SIZE = 512;
     const TRAIN_DATA_SIZE = 6000;
     const TEST_DATA_SIZE = 1000;
-    
+
     // Get the training batches and resize them. Remember to put your code
     // inside a tf.tidy() clause to clean up all the intermediate tensors.
     // HINT: Take a look at the MNIST example.
@@ -94,7 +93,7 @@ async function train(model, data) {
             d.labels
         ];
     });
-    
+
     // Get the testing batches and resize them. Remember to put your code
     // inside a tf.tidy() clause to clean up all the intermediate tensors.
     // HINT: Take a look at the MNIST example.
@@ -105,7 +104,7 @@ async function train(model, data) {
             d.labels
         ];
     });
-    
+
     return model.fit(trainXs, trainYs, {
         batchSize: BATCH_SIZE,
         validationData: [testXs, testYs],
@@ -115,13 +114,13 @@ async function train(model, data) {
     });
 }
 
-function setPosition(e){
-    pos.x = e.clientX-100;
-    pos.y = e.clientY-100;
+function setPosition(e) {
+    pos.x = e.clientX - 100;
+    pos.y = e.clientY - 100;
 }
-    
+
 function draw(e) {
-    if(e.buttons!=1) return;
+    if (e.buttons != 1) return;
     ctx.beginPath();
     ctx.lineWidth = 24;
     ctx.lineCap = 'round';
@@ -132,34 +131,35 @@ function draw(e) {
     ctx.stroke();
     rawImage.src = canvas.toDataURL('image/png');
 }
-    
+
 function erase() {
     ctx.fillStyle = "black";
-    ctx.fillRect(0,0,280,280);
+    ctx.fillRect(0, 0, 280, 280);
 }
-    
+
 function save() {
-    var raw = tf.browser.fromPixels(rawImage,1);
-    var resized = tf.image.resizeBilinear(raw, [28,28]);
+    var raw = tf.browser.fromPixels(rawImage, 1);
+    var resized = tf.image.resizeBilinear(raw, [28, 28]);
     var tensor = resized.expandDims(0);
-    
+
     var prediction = model.predict(tensor);
     var pIndex = tf.argMax(prediction, 1).dataSync();
-    
-    var classNames = ["T-shirt/top", "Trouser", "Pullover",
-                      "Dress", "Coat", "Sandal", "Shirt",
-                      "Sneaker", "Bag", "Ankle boot"];
-    
-    
+
+    var classNames = ["T-shirt/top", "Trouser", "Pullover",
+        "Dress", "Coat", "Sandal", "Shirt",
+        "Sneaker", "Bag", "Ankle boot"
+    ];
+
+
     alert(classNames[pIndex]);
 }
-    
+
 function init() {
     canvas = document.getElementById('canvas');
     rawImage = document.getElementById('canvasimg');
     ctx = canvas.getContext("2d");
     ctx.fillStyle = "black";
-    ctx.fillRect(0,0,280,280);
+    ctx.fillRect(0, 0, 280, 280);
     canvas.addEventListener("mousemove", draw);
     canvas.addEventListener("mousedown", setPosition);
     canvas.addEventListener("mouseenter", setPosition);
@@ -174,7 +174,7 @@ async function run() {
     const data = new FMnistData();
     await data.load();
     const model = getModel();
-    tfvis.show.modelSummary({name: 'Model Architecture'}, model);
+    tfvis.show.modelSummary({ name: 'Model Architecture' }, model);
     await train(model, data);
     await model.save('downloads://my_model');
     init();
@@ -182,3 +182,11 @@ async function run() {
 }
 
 document.addEventListener('DOMContentLoaded', run);
+
+document.addEventListener('click', function() {
+    console.log('click event');
+});
+
+window.addEventListener('error', function(e) {
+    console.error('Error:', e.message);
+});

From 04514fedc58921bd56ee5c2f76a972e33a43f4ae Mon Sep 17 00:00:00 2001
From: zhadko <130551128+Antonio20012@users.noreply.github.com>
Date: Sat, 4 May 2024 11:28:16 +0300
Subject: [PATCH 03/11] Update fashion-mnist.html

---
 C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html b/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html
index 52f6254a..688a8493 100755
--- a/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html
+++ b/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html
@@ -12,6 +12,6 @@
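
Editor's note (not part of the patches above): in PATCH 01, run() exports the trained network with model.save('downloads://my_model'), which downloads my_model.json plus a binary weights file to the user's machine. Below is a minimal sketch of how that exported model could be reloaded for inference on another TensorFlow.js page. It assumes the two downloaded files are served next to the page; the loadAndPredict helper and the relative path are illustrative only and do not appear in the assignment.

// Illustrative sketch: reload the model exported via `downloads://my_model`.
// Assumes my_model.json and my_model.weights.bin are hosted next to this page.
async function loadAndPredict(imageElement) {
    // tf.loadLayersModel fetches the topology JSON and its referenced weight files.
    const model = await tf.loadLayersModel('my_model.json');

    // Mirror the preprocessing used in save(): grayscale, resize to 28x28, add a batch dimension.
    const prediction = tf.tidy(() => {
        const raw = tf.browser.fromPixels(imageElement, 1);
        const resized = tf.image.resizeBilinear(raw, [28, 28]);
        const batched = resized.expandDims(0);
        return model.predict(batched);
    });

    // Index of the most probable class, in the same order as classNames in PATCH 02.
    const pIndex = (await prediction.argMax(1).data())[0];
    prediction.dispose();
    return pIndex;
}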