In a smartphone-based logic, JavaScript supports accelerometer and/or gyroscope measurements for device motion and device orientation. The devicemotion and deviceorientation events are devoted to this task.
Example Google_cardboard_three_js.zip
window.addEventListener('deviceorientation', event => {
    data.innerHTML = "'z' motion (0/360): " + event.alpha + "<br/>";
    data.innerHTML += "'x' motion (-180/180): " + event.beta + "<br/>";
    data.innerHTML += "'y' motion (-90/90): " + event.gamma;
});
// Three.js support:
const device_orientation_controls = new THREE.DeviceOrientationControls(camera, true);
device_orientation_controls.connect();
…
device_orientation_controls.update(); // Animation loop...
Rule(s)
- Geolocation in JavaScript is a native facility of browsers to get the device's latitude and longitude. While smartphones have a dedicated chip for that, using the American Global Positioning System (GPS) or the European one (a.k.a. Galileo), desktop computers rely on (non-standardized) proprietary implementations like IP-address-based location lookup.
Geolocation facility availability
window.console.assert(window.navigator.geolocation !== null);
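For a one-shot reading (the OpenStreetMap examples below rely on watchPosition instead), a minimal getCurrentPosition sketch:
// One-shot position reading...
window.navigator.geolocation.getCurrentPosition(
    (position: GeolocationPosition) => { // Success...
        window.console.log("Latitude: " + position.coords.latitude + ", longitude: " + position.coords.longitude);
    },
    (failure: GeolocationPositionError) => { // Failure (permission denied, timeout...)...
        window.console.warn(failure.code + ": " + failure.message);
    },
    {timeout: 5000, maximumAge: 1000, enableHighAccuracy: true} // Same 'PositionOptions' as in the examples below...
);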
Rule(s)
- Geolocation is associated with Web data freely offered or sold by data providers like Google Maps Platform (paid) or OpenStreetMap (free). Data access relies on JavaScript APIs, respectively the Google Maps Platform APIs and Leaflet.
Geolocation, opening facility (OpenStreetMap)
Example Geolocation.js.zip
class OpenStreetMap_API {
    …
    static _Levallois_Perret() {
        return {latitude: 48.893217, longitude: 2.287864, name: "Levallois-Perret"};
    }
    constructor(latitude = OpenStreetMap_API._Levallois_Perret().latitude, longitude = OpenStreetMap_API._Levallois_Perret().longitude) {
        this._latitude = latitude;
        this._longitude = longitude;
        this._map = window.document.getElementById('map'); // Get the display area (DOM must be ready)...
        this._map.style.height = window.screen.height + "px";
        this._options = { // 'PositionOptions'
            timeout: 5000, // 'Infinity' (default): 'getCurrentPosition' won't return until the position is available...
            maximumAge: 1000, // '0' (default): the device cannot use a cached position; it must retrieve the real current position...
            enableHighAccuracy: true // Longer...
        };
        …
        if (window.navigator.geolocation !== null) // 'Geolocation' interface
            this._geolocation_service = window.navigator.geolocation.watchPosition( // Run the geolocation facility...
                this._geolocation.bind(this), // Success...
                (failure) => this._inform(failure.code + ": " + failure.message),
                this._options);
        else
            this._inform("'window.navigator.geolocation === null'");
Geolocation, closing facility (OpenStreetMap)
Example Geolocation.js.zip
class OpenStreetMap_API {
    …
    _geolocation(position /*: GeolocationPosition*/) {
        // https://developer.mozilla.org/en-US/docs/Web/API/GeolocationPosition
        // https://developer.mozilla.org/en-US/docs/Web/API/GeolocationCoordinates:
        this._map_.setView([position.coords.latitude, position.coords.longitude], /* zoom */ 10);
        const marker = L.marker([position.coords.latitude, position.coords.longitude], {icon: OpenStreetMap_API._Franck()}).addTo(this._map_);
        marker.bindPopup("Franck").openPopup();
        const popup = L.popup();
        this._map_.on('click', event => { // Display latitude and longitude...
            popup.setLatLng(event.latlng).setContent(event.latlng.toString()).openOn(this._map_);
        });
        // Closing geolocation facility:
        window.navigator.geolocation.clearWatch(this._geolocation_service);
        this._inform("Geolocation service is stopped...", 1000);
    }
}
In a standardization logic, JavaScript gives access to device equipment, along with the capability of testing for presence and availability. Typically, the Vibration API may activate the device's vibrator, if any, programmatically.
Example Vibration_API.ts.zip
window.addEventListener("DOMContentLoaded", () => { // DOM just loaded... window.console.assert(window.document.readyState !== 'loading'); const FB: HTMLImageElement = window.document.getElementById("FB") as HTMLImageElement; let angle = 10; const interval_id = window.setInterval(() => { FB.style.transform = "rotate(" + angle + "deg)"; angle = -angle; window.navigator.vibrate(50); }, 100); FB.addEventListener('click', () => { window.clearInterval(interval_id); window.document.getElementById("Feel_it_")!.innerHTML = "Vibration just stops..."; }); });
The Web Audio API benefits from being used through dedicated high-level libraries, notably SoundJS.
Example Theo_is_crying.ts.zip
createjs.Sound.on('fileload', event => { // This is fired for each sound that is registered...
    // 'PlayPropsConfig' class of SOUNDJS API:
    const instance = createjs.Sound.play(event.id, {interrupt: createjs.Sound.INTERRUPT_ANY, loop: 1, volume: 0.5, duration: 500});
    instance.on('complete', () => {
        window.console.log("End of: " + event.id);
    });
});
createjs.Sound.registerSound("./sounds/Baby_laughing.mp3", "Baby_laughing_sound_ID");
…
createjs.Sound.registerSound("./sounds/Baby_crying.mp3", "Baby_crying_sound_ID");
…
const result = createjs.Sound.play("Baby_crying_sound_ID"); // Potential bug: the sound may not yet be loaded...
if (result.playState !== createjs.Sound.PLAY_SUCCEEDED)
    window.console.warn(result.src + ": " + result.playState);
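For reference, the underlying Web Audio API can also be used directly, without any library. A minimal sketch (frequency, gain and duration values are arbitrary; browsers may require a prior user gesture before the audio context is allowed to start):
// Minimal Web Audio API usage: play a sine tone for half a second...
const audio_context = new AudioContext();
const oscillator = audio_context.createOscillator(); // Sound source...
const gain = audio_context.createGain(); // Volume control...
oscillator.type = 'sine';
oscillator.frequency.value = 440; // "A" note, 440 Hz
gain.gain.value = 0.5;
oscillator.connect(gain).connect(audio_context.destination); // Source -> gain -> speakers
oscillator.start();
oscillator.stop(audio_context.currentTime + 0.5); // Stop after 0.5 s...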
WebRTC, standing for Web Real-Time Communication, allows direct access to input devices, namely webcams and microphones.
Rule(s)
- One may test the available multimedia capabilities of a given machine and its associated browser from, for instance, here…
- One may also specifically test the launching of the camera here…
Example (finding available devices)
// 'window.navigator.mediaDevices !== undefined' -> WebRTC available!
window.navigator.mediaDevices.enumerateDevices().then((devices: MediaDeviceInfo[]) => {
    devices.forEach((device: MediaDeviceInfo) => {
        window.alert(device.kind + ": " + device.label + " id = " + device.deviceId);
    });
}).catch(function (error) {
    window.console.log(error.name + ": " + error.message);
});
Rule(s)
- Because of privacy issues, the browser requires a user's agreement before going on… Note that Chrome requires the user's agreement only once (and records it for future sessions) while Firefox, for instance, asks for this agreement several times.
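A minimal sketch of how the agreement request may be handled before the full Selfie example below (the error names are those defined for getUserMedia):
// Requesting the camera triggers the browser's permission prompt:
window.navigator.mediaDevices.getUserMedia({video: true, audio: false})
    .then((stream: MediaStream) => {
        window.console.log("Camera granted, video tracks: " + stream.getVideoTracks().length);
        stream.getTracks().forEach(track => track.stop()); // Release the camera immediately...
    })
    .catch((error: DOMException) => {
        if (error.name === 'NotAllowedError') // The user (or a policy) denied the request...
            window.console.warn("Permission denied...");
        else if (error.name === 'NotFoundError') // No matching input device...
            window.console.warn("No camera found...");
        else
            window.console.error(error.name + ": " + error.message);
    });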
Example Selfie.ts.zip
<video id="my_video" width="200" height="300" poster="./img/FranckBarbier.jpg"></video>
declare const createjs: any;

Object.defineProperty(window, "Snapshot", {
    value: new Promise(send => {
        createjs.Sound.on('fileload', (event: { id: string }) => {
            send(createjs.Sound.createInstance(event.id));
        });
        createjs.Sound.registerSound("./sounds/Snapshot.mp3", "./sounds/Snapshot.mp3");
    }),
    enumerable: false,
    configurable: false,
    writable: false
});

let _DOM_ready = null;
Object.defineProperty(window, "DOM_ready", {
    value: new Promise(launched_function_when_DOM_ready => {
        _DOM_ready = launched_function_when_DOM_ready;
    }),
    enumerable: false,
    configurable: false,
    writable: false
});
window.document.onreadystatechange = _DOM_ready;

(window as any).DOM_ready.then(() => { // DOM is ready as a promise...
    /* Webcam shooting */
    // Tested with Chrome >= 68, Firefox >= 62, Edge >= 42 and Safari >= 11
    // WebRTC camera simple test: https://webrtc.github.io/samples/src/content/getusermedia/gum/
    // HTML tag: '<video>':
    const _my_video = window.document.getElementById('my_video') as HTMLVideoElement;
    _my_video.addEventListener('canplay', () => {
        window.console.log("The video is playing...");
        window.console.log("_my_video.videoHeight: " + _my_video.videoHeight + " _my_video.videoWidth: " + _my_video.videoWidth);
    });
    const _working_canvas = window.document.createElement('canvas') as HTMLCanvasElement;
    // window.alert("typeof window.navigator.mediaDevices.getUserMedia: " + typeof window.navigator.mediaDevices.getUserMedia); // 'function'
    // window.alert("Is Promise? " + (window.navigator.mediaDevices.getUserMedia({
    //     audio: false,
    //     video: true
    // }) instanceof Promise)); // 'true'
    // Safari: see also "Development" tab, option WebRTC
    /* Not supported by Safari : {video: {width: 512, height: 512, facingMode: 'user'}} */
    // Looking at device capabilities:
    // window.alert(JSON.stringify(window.navigator.mediaDevices.getSupportedConstraints()));
    let constraints: { audio: boolean, video: boolean | { facingMode: 'user' } } = {audio: false, video: true};
    if (window.navigator.mediaDevices.getSupportedConstraints().hasOwnProperty('facingMode')) {
        constraints = {audio: false, video: {facingMode: 'user'}}; // Selfie mode...
    }
    // For test:
    // window.navigator.mediaDevices.getUserMedia(constraints).then((stream: MediaStream) => {
    //     _my_video.srcObject = stream; // Assign the webcam to the "my_video" HTML element...
    //     _my_video.play(); // Start playing video...
    // }).catch((error) => {
    //     window.alert("window.navigator.mediaDevices.getUserMedia: " + error.message);
    // });
    const _video_management = () => {
        if (_my_video.srcObject !== null) { // Video is running, take photography:
            if ((_my_video.srcObject as MediaStream).getVideoTracks()[0].readyState === 'live') {
                (window as any).Snapshot.then((sound: any) => {
                    sound.play();
                });
                // Before stopping the camera, one records the last frame:
                _working_canvas.setAttribute('width', _my_video.videoWidth.toString());
                _working_canvas.setAttribute('height', _my_video.videoHeight.toString());
                _working_canvas.getContext('2d')!.drawImage(_my_video, 0, 0, _my_video.videoWidth, _my_video.videoHeight);
                const _selfie = new Image();
                _selfie.onload = () => {
                    // This immediately stops the webcam (Firefox keeps the last frame on screen while Chrome generates a black screen):
                    (_my_video.srcObject as MediaStream).getVideoTracks()[0].stop();
                    // The stream is no longer active:
                    window.console.assert((_my_video.srcObject as MediaStream).active === false);
                    window.console.assert((_my_video.srcObject as MediaStream).getVideoTracks()[0].readyState === 'ended');
                    // The stream is detached from the '<video>' tag:
                    _my_video.srcObject = null; // For next shoot...
                    _my_video.setAttribute('poster', _selfie.src);
                };
                _selfie.src = _working_canvas.toDataURL("image/png"); // From canvas to image...
            } else
                window.alert("_my_video.srcObject.getVideoTracks()[0].readyState === 'ended'");
        } else { // New stream is required (with permission?)
            // Firefox asks for permission again...
            window.navigator.mediaDevices.getUserMedia(constraints).then((stream: MediaStream) => {
                // Safari issue here:
                // https://help.milkshake.tv/hc/en-us/articles/115005464689-Videos-not-playing-when-using-Safari-11-web-browser
                _my_video.srcObject = stream; // Assign the webcam to the "my_video" HTML element
                _my_video.play(); // Start playing video...
            }).catch((error) => {
                window.alert("window.navigator.mediaDevices.getUserMedia: " + error.message);
            });
        }
    };
    if (window.PointerEvent) // May not work for some browsers...
        _my_video.onpointerup = _video_management;
    else // Older Safari versions do not support 'window.PointerEvent': https://caniuse.com/#feat=pointer
        _my_video.addEventListener('mouseup', _video_management, true);
});
Rule(s)
- Web services are server-side programs accessible over the Web. With appropriate calls in JavaScript, one is able to retrieve data and to offload computations from the client side.
- For some categories of Web services, the way they may be reached and called is documented through the Web Services Description Language (WSDL).
Example
- Documentation access: https://www.w3schools.com/xml/tempconvert.asmx?WSDL
- Browser-based test: https://www.w3schools.com/xml/tempconvert.asmx?op=CelsiusToFahrenheit
Get
Web_Services.ts.zip
const request = new XMLHttpRequest();
request.onreadystatechange = () => {
    window.console.log(request.getAllResponseHeaders());
    if (request.readyState === XMLHttpRequest.DONE) {
        if (request.getResponseHeader('Content-Type')?.includes('application/json')) {
            const response: { rates: any } = window.JSON.parse(request.responseText);
            window.alert("For 1 US Dollar (USD) to MAD (Moroccan Dirham): " + response.rates.MAD);
        }
    }
};
// This Web site requires a license key:
request.open("GET", "http://openexchangerates.org/api/latest.json" + "?app_id=" + "<your license key here>", true);
// 'request' must already be opened:
request.send(null);
Post
Web_Services.ts.zip
const request2 = new XMLHttpRequest(); // Stuck by CORS
request2.onreadystatechange = function () {
    if (request2.readyState === XMLHttpRequest.DONE) {
        window.alert(request2.responseText);
    }
};
request2.open("POST", "http://www.w3schools.com" + "/xml/tempconvert.asmx/CelsiusToFahrenheit", true);
// request2.setRequestHeader("Content-Length", 10); // The number of bytes of data in the body of the request or response
request2.setRequestHeader("Content-Type", "application/x-www-form-urlencoded"); // As an HTML form
request2.send("Celsius=20");
Post using fetch
Web_Services.ts.zip
class ReqRes {
    static URL() {
        return "https://reqres.in";
    }
    static Path() {
        return "/api/users";
    }
    static Run1(name = "FranckBarbier", job = "Trainer") {
        fetch(ReqRes.URL() + ReqRes.Path(), {
            body: JSON.stringify({name: name, job: job}),
            headers: { // Mandatory to get 'result' in appropriate shape...
                'accept': 'application/json', // -> response
                'content-type': 'application/json; charset=UTF-8'
            },
            method: 'POST'
        }).then(response => {
            response.json().then(result => {
                window.console.assert(result.name === name); // 'Req-Res' just returns data for "simulation"...
                window.alert("'ReqRes' ('fetch') RUN1: " + JSON.stringify(result));
            });
        });
    }
}
window.onload = (event: Event) => {
    ReqRes.Run1();
};
Note(s)
- There is a useful Web site from which requests may be tested against several parameters: here… as, for example, https://httpbin.org/get?lang=fr&prenom=Franck&nom=Barbier.
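As a complement, a minimal sketch of a GET request with query parameters, using fetch against the httpbin.org test site mentioned above:
// GET with query parameters; httpbin.org echoes the received parameters in the 'args' field...
const parameters = new URLSearchParams({lang: "fr", prenom: "Franck", nom: "Barbier"});
fetch("https://httpbin.org/get?" + parameters.toString(), {headers: {'accept': 'application/json'}})
    .then(response => response.json())
    .then(result => {
        window.console.assert(result.args.prenom === "Franck");
        window.console.log(JSON.stringify(result.args));
    })
    .catch(error => window.console.error(error.message));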
WebSockets is a technology that promotes bi-directional, full-duplex communication between (not exclusively) browsers and WebSocket servers. The general way of dealing with WebSockets in JavaScript is described here…
Example WebSockets_Tyrus_1_17.Java.zip
// Tested with Tyrus 1.17 WebSockets Java library
const service = new WebSocket("ws://localhost:1963/FranckBarbier/WebSockets_illustration");
service.onmessage = (event: MessageEvent) => {
    console.log("Message from Java: " + event.data);
};
service.onopen = (event: Event) => {
    console.log("service.onopen...");
    const response = window.confirm(service.url + " just opened... Say 'Hi!'?");
    if (response)
        service.send(JSON.stringify({Response: "Hi!"}));
};
service.onclose = (event: CloseEvent) => {
    console.log("service.onclose... " + event.code); // '1011': the server is terminating the connection because it encountered an unexpected condition that prevented it from fulfilling the request.
    window.alert("Bye! See you later...");
};
service.onerror = (event: Event) => {
    window.alert("service.onerror...");
};
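Calling send on a socket that is still connecting throws an error, while data sent after closing is discarded; a minimal guard sketch reusing the service object above:
// Guarding 'send' and closing cleanly:
const safe_send = (payload: object): boolean => {
    if (service.readyState === WebSocket.OPEN) { // 'CONNECTING', 'OPEN', 'CLOSING' or 'CLOSED'
        service.send(JSON.stringify(payload));
        return true;
    }
    return false; // Not (yet, or no longer) open...
};
// Later, a clean shutdown with a normal-closure code and a reason:
// service.close(1000, "Bye!");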
Web Workers are a support for a kind of multithreading. More precisely, some JavaScript code may run outside the browser's main thread so that this code lightens the load on the user interface. Web Workers mainly consist of facilities for assigning background tasks from a browser script to a given Web Worker that runs in parallel.
Rule(s)
- A Web Worker does not have access to common JavaScript resources, window in particular. The self keyword then refers to the Web Worker's own global scope.
Example (“main” thread in the browser) Web_Workers.ts.zip
import * as $ from 'jquery';
declare const Swal: any;

window.addEventListener('load', () => { // jQuery is used:
    const $canvas = $("#My_canvas").get(0) as HTMLCanvasElement; // jQuery access to DOM canvas elem. with 'My_canvas' as id.
    const image = new Image();
    const worker = new Worker("./js/Web_Workers_parallel.js"); // Behind the browser stage, this "parallel" code is that of the worker...
    worker.addEventListener('message', response_from_process_image_by_worker); // Subscription about worker's response...
    function response_from_process_image_by_worker(message: MessageEvent) { // Handler of worker's response...
        Swal.fire({
            text: "Worker just terminated image processing... Show it?",
            title: "Web Workers",
            icon: 'info',
            // Reload processed image:
        }).then(() => $canvas.getContext('2d')!.putImageData(message.data, 0, 0));
        worker.terminate(); // Worker is no longer used, 'close' optional statement...
    }
    $(window.document).on("go!", process_image_by_worker); // Wait for image availability...
    function process_image_by_worker() { // Handler of image availability...
        if (window.Worker) // Test if the browser supports Web Workers technology...
            // Instance of 'ImageData' must be sent since it is processable by the new HTML5 structured clone algorithm:
            worker.postMessage($canvas.getContext('2d')!.getImageData(0, 0, $canvas.width, $canvas.height));
    }
    image.onload = () => {
        $canvas.width = image.width;
        $canvas.height = image.height;
        $canvas.getContext('2d')!.drawImage(image, 0, 0); // Image is loaded in canvas...
        $(window.document).trigger("go!"); // Image availability: event triggering in jQuery...
    };
    image.src = "./img/Image.jpg"; // Image load...
});
Example (“parallel” thread, outside the browser's main thread) Web_Workers.ts.zip
self.importScripts("../node_modules/chai/chai.js"); // Reused library requires local-scope load...
declare const chai: any;

function process(image_data: ImageData) {
    const buffer = new Uint32Array(image_data.data.buffer);
    for (let pixel_number = 0; pixel_number < buffer.length; pixel_number++) {
        const alpha = buffer[pixel_number] >>> 24; // Most left byte
        let blue = (buffer[pixel_number] & 0x00FF0000) >> 16;
        let green = (buffer[pixel_number] & 0x0000FF00) >> 8;
        let red = buffer[pixel_number] & 0x000000FF; // Most right byte
        // Simplistic image processing:
        blue = ~blue;
        green = ~green;
        // red = ~red;
        buffer[pixel_number] = (alpha << 24) | (blue << 16) | (green << 8) | red;
    }
}
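The excerpt above only shows the processing function; the message wiring between the worker and the main thread is not reproduced here. A minimal sketch of what it presumably looks like, given that the main thread posts an ImageData instance and calls putImageData on the reply:
// Presumed wiring (an assumption, not shown in the excerpt above): receive the ImageData, process it, send it back...
self.addEventListener('message', (message: MessageEvent) => {
    const image_data = message.data as ImageData;
    process(image_data); // In-place processing of the pixel buffer...
    (self as any).postMessage(image_data); // The main thread then calls 'putImageData' with it...
});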
Speech synthesis mainly works with Chrome; an introduction is available here…
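A minimal sketch of the Speech Synthesis API (the utterance text and language are illustrative):
// Speech synthesis: have the browser read a sentence aloud...
if ('speechSynthesis' in window) {
    const utterance = new SpeechSynthesisUtterance("Hello from JavaScript!");
    utterance.lang = 'en-US'; // Illustrative language choice...
    utterance.rate = 1; // Normal speed
    utterance.onend = () => window.console.log("Utterance finished...");
    window.speechSynthesis.speak(utterance);
} else {
    window.console.warn("No Speech Synthesis support...");
}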
Speech recognition mainly works with Chrome; an introduction is available here…
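A minimal sketch of speech recognition through the Chrome-prefixed webkitSpeechRecognition constructor of the Web Speech API (language and options are illustrative):
// Speech recognition: transcribe what the microphone captures...
declare const webkitSpeechRecognition: any;
if ('webkitSpeechRecognition' in window) {
    const recognition = new webkitSpeechRecognition();
    recognition.lang = 'en-US'; // Illustrative language choice...
    recognition.interimResults = false; // Only final results...
    recognition.onresult = (event: any) => {
        window.console.log("Heard: " + event.results[0][0].transcript);
    };
    recognition.onerror = (event: any) => window.console.warn("Recognition error: " + event.error);
    recognition.start(); // Triggers a microphone permission request...
} else {
    window.console.warn("No Speech Recognition support...");
}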