From efb6453f1a997dd4b30210eec4a2f457bdeacb29 Mon Sep 17 00:00:00 2001
From: Eric Schmidt
Date: Fri, 31 Jan 2020 11:32:12 -0800
Subject: [PATCH 1/7] Adds method stubs for face and person detection

---
 .gitignore                   |  1 +
 samples/analyze.v1p2beta1.js | 50 ++++++++++++++++++++++++------------
 2 files changed, 35 insertions(+), 16 deletions(-)

diff --git a/.gitignore b/.gitignore
index c199edd9..38a485ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ system-test/*key.json
 package-lock.json
 .vscode
 __pycache__
+*.code-workspace
\ No newline at end of file
diff --git a/samples/analyze.v1p2beta1.js b/samples/analyze.v1p2beta1.js
index 77b3f862..fad23f7c 100644
--- a/samples/analyze.v1p2beta1.js
+++ b/samples/analyze.v1p2beta1.js
@@ -53,11 +53,11 @@ async function analyzeTextGCS(gcsUri) {
     }
     console.log(
       `\tStart: ${time.startTimeOffset.seconds}` +
-        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(
       `\tEnd: ${time.endTimeOffset.seconds}.` +
-        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`\tConfidence: ${segment.confidence}`);
     segment.frames.forEach(frame => {
@@ -70,7 +70,7 @@ async function analyzeTextGCS(gcsUri) {
     }
     console.log(
       `Time offset for the frame: ${timeOffset.seconds}` +
-        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Rotated Bounding Box Vertices:`);
     frame.rotatedBoundingBox.vertices.forEach(vertex => {
@@ -126,10 +126,10 @@ async function analyzeObjectTrackingGCS(gcsUri) {
     }
     console.log(
       `Segment: ${time.startTimeOffset.seconds}` +
-        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
-          time.endTimeOffset.seconds
-        }.` +
-        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
+        time.endTimeOffset.seconds
+      }.` +
+      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Confidence: ${object.confidence}`);
     const frame = object.frames[0];
@@ -143,7 +143,7 @@ async function analyzeObjectTrackingGCS(gcsUri) {
     }
     console.log(
       `Time offset for the first frame: ${timeOffset.seconds}` +
-        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Bounding box position:`);
     console.log(`\tleft :${box.left}`);
@@ -200,11 +200,11 @@ async function analyzeText(path) {
     }
     console.log(
       `\tStart: ${time.startTimeOffset.seconds}` +
-        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(
       `\tEnd: ${time.endTimeOffset.seconds}.` +
-        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`\tConfidence: ${segment.confidence}`);
     segment.frames.forEach(frame => {
@@ -217,7 +217,7 @@ async function analyzeText(path) {
     }
     console.log(
       `Time offset for the frame: ${timeOffset.seconds}` +
-        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Rotated Bounding Box Vertices:`);
     frame.rotatedBoundingBox.vertices.forEach(vertex => {
@@ -276,10 +276,10 @@ async function analyzeObjectTracking(path) {
     }
     console.log(
       `Segment: ${time.startTimeOffset.seconds}` +
-        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
-          time.endTimeOffset.seconds
-        }.` +
-        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
+        time.endTimeOffset.seconds
+      }.` +
+      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Confidence: ${object.confidence}`);
     const frame = object.frames[0];
@@ -293,7 +293,7 @@ async function analyzeObjectTracking(path) {
     }
     console.log(
       `Time offset for the first frame: ${timeOffset.seconds}` +
-        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Bounding box position:`);
     console.log(`\tleft :${box.left}`);
@@ -303,6 +303,12 @@ async function analyzeObjectTracking(path) {
   });
   // [END video_object_tracking_beta]
 }
+async function analyzePersonGCS(gcsUri) {
+
+}
+async function analyzeFaceGCS(gcsUri) {
+
+}
 
 async function main() {
   require(`yargs`)
@@ -331,6 +337,18 @@ async function main() {
       {},
       opts => analyzeObjectTracking(opts.path)
     )
+    .command(
+      `detect-person <gcsUri>`,
+      `Detects people in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzePersonGCS(opts.gcsUri)
+    )
+    .command(
+      `detect-face <gcsUri>`,
+      `Detects a person's face in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => analyzeFaceGCS(opts.gcsUri)
+    )
     .example(`node $0 video-text ./resources/googlework_short.mp4`)
     .example(
       `node $0 video-text-gcs gs://nodejs-docs-samples/video/googlework_short.mp4`

From 8ea1e1f79d2215b2820ac4c0bce3c70edc6ebf82 Mon Sep 17 00:00:00 2001
From: Eric Schmidt
Date: Wed, 5 Feb 2020 20:57:17 -0800
Subject: [PATCH 2/7] Adds Video face and people detection samples

---
 samples/analyze.v1p2beta1.js                  |  50 +--
 samples/analyze.v1p3beta1.js                  | 380 ++++++++++++++++++
 samples/system-test/analyze.v1p3beta1.test.js |  49 +++
 3 files changed, 445 insertions(+), 34 deletions(-)
 create mode 100644 samples/analyze.v1p3beta1.js
 create mode 100644 samples/system-test/analyze.v1p3beta1.test.js

diff --git a/samples/analyze.v1p2beta1.js b/samples/analyze.v1p2beta1.js
index fad23f7c..77b3f862 100644
--- a/samples/analyze.v1p2beta1.js
+++ b/samples/analyze.v1p2beta1.js
@@ -53,11 +53,11 @@ async function analyzeTextGCS(gcsUri) {
     }
     console.log(
       `\tStart: ${time.startTimeOffset.seconds}` +
-      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(
       `\tEnd: ${time.endTimeOffset.seconds}.` +
-      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`\tConfidence: ${segment.confidence}`);
     segment.frames.forEach(frame => {
@@ -70,7 +70,7 @@ async function analyzeTextGCS(gcsUri) {
     }
     console.log(
       `Time offset for the frame: ${timeOffset.seconds}` +
-      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Rotated Bounding Box Vertices:`);
     frame.rotatedBoundingBox.vertices.forEach(vertex => {
@@ -126,10 +126,10 @@ async function analyzeObjectTrackingGCS(gcsUri) {
     }
     console.log(
       `Segment: ${time.startTimeOffset.seconds}` +
-      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
-        time.endTimeOffset.seconds
-      }.` +
-      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
+          time.endTimeOffset.seconds
+        }.` +
+        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Confidence: ${object.confidence}`);
     const frame = object.frames[0];
@@ -143,7 +143,7 @@ async function analyzeObjectTrackingGCS(gcsUri) {
     }
     console.log(
       `Time offset for the first frame: ${timeOffset.seconds}` +
-      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Bounding box position:`);
     console.log(`\tleft :${box.left}`);
@@ -200,11 +200,11 @@ async function analyzeText(path) {
     }
     console.log(
       `\tStart: ${time.startTimeOffset.seconds}` +
-      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(
       `\tEnd: ${time.endTimeOffset.seconds}.` +
-      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`\tConfidence: ${segment.confidence}`);
     segment.frames.forEach(frame => {
@@ -217,7 +217,7 @@ async function analyzeText(path) {
     }
     console.log(
       `Time offset for the frame: ${timeOffset.seconds}` +
-      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Rotated Bounding Box Vertices:`);
     frame.rotatedBoundingBox.vertices.forEach(vertex => {
@@ -276,10 +276,10 @@ async function analyzeObjectTracking(path) {
     }
     console.log(
       `Segment: ${time.startTimeOffset.seconds}` +
-      `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
-        time.endTimeOffset.seconds
-      }.` +
-      `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s to ${
+          time.endTimeOffset.seconds
+        }.` +
+        `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Confidence: ${object.confidence}`);
     const frame = object.frames[0];
@@ -293,7 +293,7 @@ async function analyzeObjectTracking(path) {
     }
     console.log(
       `Time offset for the first frame: ${timeOffset.seconds}` +
-      `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
+        `.${(timeOffset.nanos / 1e6).toFixed(0)}s`
     );
     console.log(`Bounding box position:`);
     console.log(`\tleft :${box.left}`);
@@ -303,12 +303,6 @@ async function analyzeObjectTracking(path) {
   });
   // [END video_object_tracking_beta]
 }
-async function analyzePersonGCS(gcsUri) {
-
-}
-async function analyzeFaceGCS(gcsUri) {
-
-}
 
 async function main() {
   require(`yargs`)
@@ -337,18 +331,6 @@ async function main() {
       {},
       opts => analyzeObjectTracking(opts.path)
     )
-    .command(
-      `detect-person <gcsUri>`,
-      `Detects people in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
-      {},
-      opts => analyzePersonGCS(opts.gcsUri)
-    )
-    .command(
-      `detect-face <gcsUri>`,
-      `Detects a person's face in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
-      {},
-      opts => analyzeFaceGCS(opts.gcsUri)
-    )
     .example(`node $0 video-text ./resources/googlework_short.mp4`)
     .example(
       `node $0 video-text-gcs gs://nodejs-docs-samples/video/googlework_short.mp4`

diff --git a/samples/analyze.v1p3beta1.js b/samples/analyze.v1p3beta1.js
new file mode 100644
index 00000000..7726c556
--- /dev/null
+++ b/samples/analyze.v1p3beta1.js
@@ -0,0 +1,380 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+async function detectPerson(path) {
+  // [START video_detect_person_beta]
+  // Imports the Google Cloud Video Intelligence library + Node's fs library
+  const Video = require('@google-cloud/video-intelligence').v1p3beta1;
+  const fs = require('fs');
+  const util = require('util');
+  // Creates a client
+  const video = new Video.VideoIntelligenceServiceClient();
+
+  /**
+   * TODO(developer): Uncomment the following line before running the sample.
+   */
+  // const path = 'Local file to analyze, e.g. ./my-file.mp4';
+
+  // Reads a local video file and converts it to base64
+  const file = await util.promisify(fs.readFile)(path);
+  const inputContent = file.toString('base64');
+
+  const request = {
+    inputContent: inputContent,
+    features: ['PERSON_DETECTION'],
+    videoContext: {
+      personDetectionConfig: {
+        // Must set includeBoundingBoxes to true to get poses and attributes.
+        includeBoundingBoxes: true,
+        includePoseLandmarks: true,
+        includeAttributes: true,
+      },
+    },
+  };
+  // Detects people in a video
+  const [operation] = await video.annotateVideo(request);
+  console.log('Waiting for operation to complete...');
+  const results = await operation.promise();
+
+  // Gets annotations for video
+  const personAnnotations =
+    results[0].annotationResults[0].personDetectionAnnotations;
+
+  personAnnotations.forEach(personAnnotation => {
+    console.log('Person detected:');
+    const tracks = personAnnotation.tracks;
+    tracks.forEach(track => {
+      const time = track.segment;
+      if (time.startTimeOffset.seconds === undefined) {
+        time.startTimeOffset.seconds = 0;
+      }
+      if (time.startTimeOffset.nanos === undefined) {
+        time.startTimeOffset.nanos = 0;
+      }
+      if (time.endTimeOffset.seconds === undefined) {
+        time.endTimeOffset.seconds = 0;
+      }
+      if (time.endTimeOffset.nanos === undefined) {
+        time.endTimeOffset.nanos = 0;
+      }
+      console.log(
+        `\tStart: ${time.startTimeOffset.seconds}` +
+          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+      console.log(
+        `\tEnd: ${time.endTimeOffset.seconds}.` +
+          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+
+      // Each segment includes timestamped objects that
+      // include characteristics of the person detected,
+      // e.g. clothes and posture.
+      const [firstTimestampedObject] = track.timestampedObjects;
+
+      // Attributes include unique pieces of clothing,
+      // poses, or hair color.
+      firstTimestampedObject.attributes.forEach(attribute => {
+        console.log(
+          `\tAttribute: ${attribute.name}; ` + `Value: ${attribute.value}`
+        );
+      });
+      // Landmarks in person detection include body parts.
+      firstTimestampedObject.landmarks.forEach(landmark => {
+        console.log(
+          `\tLandmark: ${landmark.name}; ` +
+            `Vertex: ${landmark.point.x}, ${landmark.point.y}`
+        );
+      });
+    });
+  });
+
+  // [END video_detect_person_beta]
+}
+async function detectPersonGCS(gcsUri) {
+  // [START video_detect_person_gcs_beta]
+  // Imports the Google Cloud Video Intelligence library
+  const Video = require('@google-cloud/video-intelligence').v1p3beta1;
+  // Creates a client
+  const video = new Video.VideoIntelligenceServiceClient();
+
+  /**
+   * TODO(developer): Uncomment the following line before running the sample.
+   */
+  // const gcsUri = 'GCS URI of the video to analyze, e.g. gs://my-bucket/my-video.mp4';
+
+  const request = {
+    inputUri: gcsUri,
+    features: ['PERSON_DETECTION'],
+    videoContext: {
+      personDetectionConfig: {
+        // Must set includeBoundingBoxes to true to get poses and attributes.
+        includeBoundingBoxes: true,
+        includePoseLandmarks: true,
+        includeAttributes: true,
+      },
+    },
+  };
+  // Detects people in a video
+  const [operation] = await video.annotateVideo(request);
+  console.log('Waiting for operation to complete...');
+  const results = await operation.promise();
+
+  // Gets annotations for video
+  const personAnnotations =
+    results[0].annotationResults[0].personDetectionAnnotations;
+
+  personAnnotations.forEach(personAnnotation => {
+    console.log('Person detected:');
+    const tracks = personAnnotation.tracks;
+    tracks.forEach(track => {
+      const time = track.segment;
+      if (time.startTimeOffset.seconds === undefined) {
+        time.startTimeOffset.seconds = 0;
+      }
+      if (time.startTimeOffset.nanos === undefined) {
+        time.startTimeOffset.nanos = 0;
+      }
+      if (time.endTimeOffset.seconds === undefined) {
+        time.endTimeOffset.seconds = 0;
+      }
+      if (time.endTimeOffset.nanos === undefined) {
+        time.endTimeOffset.nanos = 0;
+      }
+      console.log(
+        `\tStart: ${time.startTimeOffset.seconds}` +
+          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+      console.log(
+        `\tEnd: ${time.endTimeOffset.seconds}.` +
+          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+
+      // Each segment includes timestamped objects that
+      // include characteristics of the person detected,
+      // e.g. clothes and posture.
+      const [firstTimestampedObject] = track.timestampedObjects;
+
+      // Attributes include unique pieces of clothing,
+      // poses, or hair color.
+      firstTimestampedObject.attributes.forEach(attribute => {
+        console.log(
+          `\tAttribute: ${attribute.name}; ` + `Value: ${attribute.value}`
+        );
+      });
+      // Landmarks in person detection include body parts.
+      firstTimestampedObject.landmarks.forEach(landmark => {
+        console.log(
+          `\tLandmark: ${landmark.name}; ` +
+            `Vertex: ${landmark.point.x}, ${landmark.point.y}`
+        );
+      });
+    });
+  });
+
+  // [END video_detect_person_gcs_beta]
+}
+async function detectFaces(path) {
+  // [START video_detect_faces_beta]
+  // Imports the Google Cloud Video Intelligence library + Node's fs library
+  const Video = require('@google-cloud/video-intelligence').v1p3beta1;
+  const fs = require('fs');
+  const util = require('util');
+  // Creates a client
+  const video = new Video.VideoIntelligenceServiceClient();
+
+  /**
+   * TODO(developer): Uncomment the following line before running the sample.
+   */
+  // const path = 'Local file to analyze, e.g. ./my-file.mp4';
+
+  // Reads a local video file and converts it to base64
+  const file = await util.promisify(fs.readFile)(path);
+  const inputContent = file.toString('base64');
+
+  const request = {
+    inputContent: inputContent,
+    features: ['FACE_DETECTION'],
+    videoContext: {
+      faceDetectionConfig: {
+        // Must set includeBoundingBoxes to true to get facial attributes.
+        includeBoundingBoxes: true,
+        includeAttributes: true,
+      },
+    },
+  };
+  // Detects faces in a video
+  const [operation] = await video.annotateVideo(request);
+  console.log('Waiting for operation to complete...');
+  const results = await operation.promise();
+
+  // Gets annotations for video
+  const faceAnnotations =
+    results[0].annotationResults[0].faceDetectionAnnotations;
+
+  faceAnnotations.forEach(faceAnnotation => {
+    console.log('Face detected:');
+    const tracks = faceAnnotation.tracks;
+    tracks.forEach(track => {
+      const time = track.segment;
+      if (time.startTimeOffset.seconds === undefined) {
+        time.startTimeOffset.seconds = 0;
+      }
+      if (time.startTimeOffset.nanos === undefined) {
+        time.startTimeOffset.nanos = 0;
+      }
+      if (time.endTimeOffset.seconds === undefined) {
+        time.endTimeOffset.seconds = 0;
+      }
+      if (time.endTimeOffset.nanos === undefined) {
+        time.endTimeOffset.nanos = 0;
+      }
+      console.log(
+        `\tStart: ${time.startTimeOffset.seconds}` +
+          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+      console.log(
+        `\tEnd: ${time.endTimeOffset.seconds}.` +
+          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+
+      // Each segment includes timestamped objects that
+      // include characteristics of the face detected.
+      const [firstTimestampedObject] = track.timestampedObjects;
+
+      firstTimestampedObject.attributes.forEach(attribute => {
+        // Attributes include facial characteristics,
+        // e.g. glasses.
+        console.log(`\tAttribute: ${attribute.name}; `);
+      });
+    });
+  });
+}
+async function detectFacesGCS(gcsUri) {
+  // [START video_detect_faces_gcs_beta]
+  // Imports the Google Cloud Video Intelligence library
+  const Video = require('@google-cloud/video-intelligence').v1p3beta1;
+  // Creates a client
+  const video = new Video.VideoIntelligenceServiceClient();
+
+  /**
+   * TODO(developer): Uncomment the following line before running the sample.
+   */
+  // const gcsUri = 'GCS URI of the video to analyze, e.g. gs://my-bucket/my-video.mp4';
+
+  const request = {
+    inputUri: gcsUri,
+    features: ['FACE_DETECTION'],
+    videoContext: {
+      faceDetectionConfig: {
+        // Must set includeBoundingBoxes to true to get facial attributes.
+        includeBoundingBoxes: true,
+        includeAttributes: true,
+      },
+    },
+  };
+  // Detects faces in a video
+  const [operation] = await video.annotateVideo(request);
+  console.log('Waiting for operation to complete...');
+  const results = await operation.promise();
+
+  // Gets annotations for video
+  const faceAnnotations =
+    results[0].annotationResults[0].faceDetectionAnnotations;
+
+  faceAnnotations.forEach(faceAnnotation => {
+    console.log('Face detected:');
+    const tracks = faceAnnotation.tracks;
+    tracks.forEach(track => {
+      const time = track.segment;
+      if (time.startTimeOffset.seconds === undefined) {
+        time.startTimeOffset.seconds = 0;
+      }
+      if (time.startTimeOffset.nanos === undefined) {
+        time.startTimeOffset.nanos = 0;
+      }
+      if (time.endTimeOffset.seconds === undefined) {
+        time.endTimeOffset.seconds = 0;
+      }
+      if (time.endTimeOffset.nanos === undefined) {
+        time.endTimeOffset.nanos = 0;
+      }
+      console.log(
+        `\tStart: ${time.startTimeOffset.seconds}` +
+          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+      console.log(
+        `\tEnd: ${time.endTimeOffset.seconds}.` +
+          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+      );
+
+      // Each segment includes timestamped objects that
+      // include characteristics of the face detected.
+      const [firstTimestampedObject] = track.timestampedObjects;
+
+      firstTimestampedObject.attributes.forEach(attribute => {
+        // Attributes include facial characteristics,
+        // e.g. glasses.
+        console.log(`\tAttribute: ${attribute.name}; `);
+      });
+    });
+  });
+}
+
+async function main() {
+  require(`yargs`)
+    .demand(1)
+    .command(
+      `video-person-gcs <gcsUri>`,
+      `Detects people in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => detectPersonGCS(opts.gcsUri)
+    )
+    .command(
+      `video-person <path>`,
+      `Detects people in a video stored in a local file using the Cloud Video Intelligence API.`,
+      {},
+      opts => detectPerson(opts.path)
+    )
+    .command(
+      `video-faces-gcs <gcsUri>`,
+      `Detects faces in a video stored in Google Cloud Storage using the Cloud Video Intelligence API.`,
+      {},
+      opts => detectFacesGCS(opts.gcsUri)
+    )
+    .command(
+      `video-faces <path>`,
+      `Detects faces in a video stored in a local file using the Cloud Video Intelligence API.`,
+      {},
+      opts => detectFaces(opts.path)
+    )
+    .example(`node $0 video-person ./resources/googlework_short.mp4`)
+    .example(
+      `node $0 video-person-gcs gs://cloud-samples-data/video/googlework_short.mp4`
+    )
+    .example(`node $0 video-faces ./resources/googlework_short.mp4`)
+    .example(
+      `node $0 video-faces-gcs gs://cloud-samples-data/video/googlework_short.mp4`
+    )
+    .wrap(120)
+    .recommendCommands()
+    .epilogue(
+      `For more information, see https://cloud.google.com/video-intelligence/docs`
+    )
+    .help()
+    .strict().argv;
+}
+
+main().catch(console.error);

diff --git a/samples/system-test/analyze.v1p3beta1.test.js b/samples/system-test/analyze.v1p3beta1.test.js
new file mode 100644
index 00000000..bd87fea5
--- /dev/null
+++ b/samples/system-test/analyze.v1p3beta1.test.js
@@ -0,0 +1,49 @@
+// Copyright 2018 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://cloud.google.com/video-intelligence/docs/
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+const cp = require('child_process');
+
+const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
+
+const cmd = 'node analyze.v1p3beta1.js';
+const url = 'gs://cloud-samples-data/video/googlework_short.mp4';
+const file = 'resources/googlework_short.mp4';
+
+describe('analyze v1p3beta1 samples', () => {
+  it('should detect people in a local file', async () => {
+    const output = execSync(`${cmd} video-person ${file}`);
+    assert.match(output, /Hair/);
+  });
+
+  it('should detect people in a GCS file', async () => {
+    const output = execSync(`${cmd} video-person-gcs ${url}`);
+    assert.match(output, /Hair/);
+  });
+
+  it('should detect faces in a local file', async () => {
+    const output = execSync(`${cmd} video-faces ${file}`);
+    assert.match(output, /glasses/);
+  });
+
+  it('should detect faces in a GCS file', async () => {
+    const output = execSync(`${cmd} video-faces-gcs ${url}`);
+    assert.match(output, /glasses/);
+  });
+});

From 82a500cd7b5eb248f35aac101641bf0b7c826143 Mon Sep 17 00:00:00 2001
From: Eric Schmidt
Date: Wed, 5 Feb 2020 21:32:41 -0800
Subject: [PATCH 3/7] Fixes license header

---
 samples/system-test/analyze.v1p3beta1.test.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/samples/system-test/analyze.v1p3beta1.test.js b/samples/system-test/analyze.v1p3beta1.test.js
index bd87fea5..7c63f7db 100644
--- a/samples/system-test/analyze.v1p3beta1.test.js
+++ b/samples/system-test/analyze.v1p3beta1.test.js
@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -16,11 +16,11 @@
 
 'use strict';
 
-const {assert} = require('chai');
-const {describe, it} = require('mocha');
+const { assert } = require('chai');
+const { describe, it } = require('mocha');
 const cp = require('child_process');
 
-const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
+const execSync = cmd => cp.execSync(cmd, { encoding: 'utf-8' });
 
 const cmd = 'node analyze.v1p3beta1.js';
 const url = 'gs://cloud-samples-data/video/googlework_short.mp4';

From 4162583a4d84406a78a0f6c7adc1d4f1a82fccb8 Mon Sep 17 00:00:00 2001
From: Eric Schmidt
Date: Wed, 5 Feb 2020 21:50:20 -0800
Subject: [PATCH 4/7] Fixes some linter errors

---
 samples/system-test/analyze.v1p3beta1.test.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/samples/system-test/analyze.v1p3beta1.test.js b/samples/system-test/analyze.v1p3beta1.test.js
index 7c63f7db..2abc7568 100644
--- a/samples/system-test/analyze.v1p3beta1.test.js
+++ b/samples/system-test/analyze.v1p3beta1.test.js
@@ -16,11 +16,11 @@
 
 'use strict';
 
-const { assert } = require('chai');
-const { describe, it } = require('mocha');
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
 const cp = require('child_process');
 
-const execSync = cmd => cp.execSync(cmd, { encoding: 'utf-8' });
+const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const cmd = 'node analyze.v1p3beta1.js';
 const url = 'gs://cloud-samples-data/video/googlework_short.mp4';

From 00c23cbf7c08231411a3334de32825d70539de46 Mon Sep 17 00:00:00 2001
From: Eric Schmidt
Date: Thu, 6 Feb 2020 13:01:09 -0800
Subject: [PATCH 5/7] Makes changes per reviewer feedback.
---
 samples/analyze.v1p3beta1.js | 200 ++++++++++++++++-------------
 1 file changed, 92 insertions(+), 108 deletions(-)

diff --git a/samples/analyze.v1p3beta1.js b/samples/analyze.v1p3beta1.js
index 7726c556..476e50d0 100644
--- a/samples/analyze.v1p3beta1.js
+++ b/samples/analyze.v1p3beta1.js
@@ -29,7 +29,7 @@ async function detectPerson(path) {
   // const path = 'Local file to analyze, e.g. ./my-file.mp4';
 
   // Reads a local video file and converts it to base64
-  const file = await util.promisify(fs.readFile)(path);
+  const file = fs.readFileSync(path);
   const inputContent = file.toString('base64');
 
   const request = {
@@ -53,54 +53,47 @@ async function detectPerson(path) {
   const personAnnotations =
     results[0].annotationResults[0].personDetectionAnnotations;
 
-  personAnnotations.forEach(personAnnotation => {
+  for (const {tracks} of personAnnotations) {
     console.log('Person detected:');
-    const tracks = personAnnotation.tracks;
-    tracks.forEach(track => {
-      const time = track.segment;
-      if (time.startTimeOffset.seconds === undefined) {
-        time.startTimeOffset.seconds = 0;
+    for (const {segment, timestampedObjects} of tracks) {
+      if (segment.startTimeOffset.seconds === undefined) {
+        segment.startTimeOffset.seconds = 0;
       }
-      if (time.startTimeOffset.nanos === undefined) {
-        time.startTimeOffset.nanos = 0;
+      if (segment.startTimeOffset.nanos === undefined) {
+        segment.startTimeOffset.nanos = 0;
       }
-      if (time.endTimeOffset.seconds === undefined) {
-        time.endTimeOffset.seconds = 0;
+      if (segment.endTimeOffset.seconds === undefined) {
+        segment.endTimeOffset.seconds = 0;
       }
-      if (time.endTimeOffset.nanos === undefined) {
-        time.endTimeOffset.nanos = 0;
+      if (segment.endTimeOffset.nanos === undefined) {
+        segment.endTimeOffset.nanos = 0;
       }
       console.log(
-        `\tStart: ${time.startTimeOffset.seconds}` +
-          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tStart: ${segment.startTimeOffset.seconds}.` +
+          `${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
       console.log(
-        `\tEnd: ${time.endTimeOffset.seconds}.` +
-          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tEnd: ${segment.endTimeOffset.seconds}.` +
+          `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
 
       // Each segment includes timestamped objects that
       // include characteristics of the person detected,
       // e.g. clothes and posture.
-      const [firstTimestampedObject] = track.timestampedObjects;
+      const [firstTimestampedObject] = timestampedObjects;
 
       // Attributes include unique pieces of clothing,
       // poses, or hair color.
-      firstTimestampedObject.attributes.forEach(attribute => {
-        console.log(
-          `\tAttribute: ${attribute.name}; ` + `Value: ${attribute.value}`
-        );
-      });
-      // Landmarks in person detection include body parts.
-      firstTimestampedObject.landmarks.forEach(landmark => {
-        console.log(
-          `\tLandmark: ${landmark.name}; ` +
-            `Vertex: ${landmark.point.x}, ${landmark.point.y}`
-        );
-      });
-    });
-  });
+      for (const {name, value} of firstTimestampedObject.attributes) {
+        console.log(`\tAttribute: ${name}; ` + `Value: ${value}`);
+      }
+      // Landmarks in person detection include body parts.
+      for (const {name, point} of firstTimestampedObject.landmarks) {
+        console.log(`\tLandmark: ${name}; Vertex: ${point.x}, ${point.y}`);
+      }
+    }
+  }
 
   // [END video_detect_person_beta]
 }
 async function detectPersonGCS(gcsUri) {
@@ -136,54 +129,48 @@ async function detectPersonGCS(gcsUri) {
   const personAnnotations =
     results[0].annotationResults[0].personDetectionAnnotations;
 
-  personAnnotations.forEach(personAnnotation => {
+  for (const {tracks} of personAnnotations) {
     console.log('Person detected:');
+
-    const tracks = personAnnotation.tracks;
-    tracks.forEach(track => {
-      const time = track.segment;
-      if (time.startTimeOffset.seconds === undefined) {
-        time.startTimeOffset.seconds = 0;
+    for (const {segment, timestampedObjects} of tracks) {
+      if (segment.startTimeOffset.seconds === undefined) {
+        segment.startTimeOffset.seconds = 0;
       }
-      if (time.startTimeOffset.nanos === undefined) {
-        time.startTimeOffset.nanos = 0;
+      if (segment.startTimeOffset.nanos === undefined) {
+        segment.startTimeOffset.nanos = 0;
      }
-      if (time.endTimeOffset.seconds === undefined) {
-        time.endTimeOffset.seconds = 0;
+      if (segment.endTimeOffset.seconds === undefined) {
+        segment.endTimeOffset.seconds = 0;
       }
-      if (time.endTimeOffset.nanos === undefined) {
-        time.endTimeOffset.nanos = 0;
+      if (segment.endTimeOffset.nanos === undefined) {
+        segment.endTimeOffset.nanos = 0;
       }
       console.log(
-        `\tStart: ${time.startTimeOffset.seconds}` +
-          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tStart: ${segment.startTimeOffset.seconds}` +
+          `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`
      );
       console.log(
-        `\tEnd: ${time.endTimeOffset.seconds}.` +
-          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tEnd: ${segment.endTimeOffset.seconds}.` +
+          `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
 
       // Each segment includes timestamped objects that
       // include characteristics of the person detected,
       // e.g. clothes and posture.
-      const [firstTimestampedObject] = track.timestampedObjects;
+      const [firstTimestampedObject] = timestampedObjects;
 
       // Attributes include unique pieces of clothing,
       // poses, or hair color.
-      firstTimestampedObject.attributes.forEach(attribute => {
-        console.log(
-          `\tAttribute: ${attribute.name}; ` + `Value: ${attribute.value}`
-        );
-      });
-      // Landmarks in person detection include body parts.
-      firstTimestampedObject.landmarks.forEach(landmark => {
-        console.log(
-          `\tLandmark: ${landmark.name}; ` +
-            `Vertex: ${landmark.point.x}, ${landmark.point.y}`
-        );
-      });
-    });
-  });
+      for (const {name, value} of firstTimestampedObject.attributes) {
+        console.log(`\tAttribute: ${name}; ` + `Value: ${value}`);
+      }
+      // Landmarks in person detection include body parts.
+      for (const {name, point} of firstTimestampedObject.landmarks) {
+        console.log(`\tLandmark: ${name}; Vertex: ${point.x}, ${point.y}`);
+      }
+    }
+  }
 
   // [END video_detect_person_gcs_beta]
 }
 async function detectFaces(path) {
@@ -201,7 +188,7 @@ async function detectFaces(path) {
   // const path = 'Local file to analyze, e.g. ./my-file.mp4';
 
   // Reads a local video file and converts it to base64
-  const file = await util.promisify(fs.readFile)(path);
+  const file = fs.readFileSync(path);
   const inputContent = file.toString('base64');
 
   const request = {
@@ -224,43 +211,41 @@ async function detectFaces(path) {
   const faceAnnotations =
     results[0].annotationResults[0].faceDetectionAnnotations;
 
-  faceAnnotations.forEach(faceAnnotation => {
+  for (const {tracks} of faceAnnotations) {
     console.log('Face detected:');
-    const tracks = faceAnnotation.tracks;
-    tracks.forEach(track => {
-      const time = track.segment;
-      if (time.startTimeOffset.seconds === undefined) {
-        time.startTimeOffset.seconds = 0;
+    for (const {segment, timestampedObjects} of tracks) {
+      if (segment.startTimeOffset.seconds === undefined) {
+        segment.startTimeOffset.seconds = 0;
       }
-      if (time.startTimeOffset.nanos === undefined) {
-        time.startTimeOffset.nanos = 0;
+      if (segment.startTimeOffset.nanos === undefined) {
+        segment.startTimeOffset.nanos = 0;
       }
-      if (time.endTimeOffset.seconds === undefined) {
-        time.endTimeOffset.seconds = 0;
+      if (segment.endTimeOffset.seconds === undefined) {
+        segment.endTimeOffset.seconds = 0;
       }
-      if (time.endTimeOffset.nanos === undefined) {
-        time.endTimeOffset.nanos = 0;
+      if (segment.endTimeOffset.nanos === undefined) {
+        segment.endTimeOffset.nanos = 0;
       }
       console.log(
-        `\tStart: ${time.startTimeOffset.seconds}` +
-          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tStart: ${segment.startTimeOffset.seconds}` +
+          `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
       console.log(
-        `\tEnd: ${time.endTimeOffset.seconds}.` +
-          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tEnd: ${segment.endTimeOffset.seconds}.` +
+          `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
 
      // Each segment includes timestamped objects that
       // include characteristics of the face detected.
-      const [firstTimestampedObject] = track.timestampedObjects;
+      const [firstTimestampedObject] = timestampedObjects;
 
-      firstTimestampedObject.attributes.forEach(attribute => {
+      for (const {name} of firstTimestampedObject.attributes) {
         // Attributes include facial characteristics,
         // e.g. glasses.
-        console.log(`\tAttribute: ${attribute.name}; `);
-      });
-    });
-  });
+        console.log(`\tAttribute: ${name}; `);
+      }
+    }
+  }
 }
 async function detectFacesGCS(gcsUri) {
@@ -294,43 +279,42 @@ async function detectFacesGCS(gcsUri) {
   const faceAnnotations =
     results[0].annotationResults[0].faceDetectionAnnotations;
 
-  faceAnnotations.forEach(faceAnnotation => {
+  for (const {tracks} of faceAnnotations) {
     console.log('Face detected:');
+
-    const tracks = faceAnnotation.tracks;
-    tracks.forEach(track => {
-      const time = track.segment;
-      if (time.startTimeOffset.seconds === undefined) {
-        time.startTimeOffset.seconds = 0;
+    for (const {segment, timestampedObjects} of tracks) {
+      if (segment.startTimeOffset.seconds === undefined) {
+        segment.startTimeOffset.seconds = 0;
       }
-      if (time.startTimeOffset.nanos === undefined) {
-        time.startTimeOffset.nanos = 0;
+      if (segment.startTimeOffset.nanos === undefined) {
+        segment.startTimeOffset.nanos = 0;
       }
-      if (time.endTimeOffset.seconds === undefined) {
-        time.endTimeOffset.seconds = 0;
+      if (segment.endTimeOffset.seconds === undefined) {
+        segment.endTimeOffset.seconds = 0;
       }
-      if (time.endTimeOffset.nanos === undefined) {
-        time.endTimeOffset.nanos = 0;
+      if (segment.endTimeOffset.nanos === undefined) {
+        segment.endTimeOffset.nanos = 0;
       }
       console.log(
-        `\tStart: ${time.startTimeOffset.seconds}` +
-          `.${(time.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tStart: ${segment.startTimeOffset.seconds}` +
+          `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
       console.log(
-        `\tEnd: ${time.endTimeOffset.seconds}.` +
-          `${(time.endTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tEnd: ${segment.endTimeOffset.seconds}.` +
+          `${(segment.endTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
 
       // Each segment includes timestamped objects that
       // include characteristics of the face detected.
-      const [firstTimestampedObject] = track.timestampedObjects;
+      const [firstTimestampedObject] = timestampedObjects;
 
-      firstTimestampedObject.attributes.forEach(attribute => {
+      for (const {name} of firstTimestampedObject.attributes) {
         // Attributes include facial characteristics,
         // e.g. glasses.
-        console.log(`\tAttribute: ${attribute.name}; `);
-      });
-    });
-  });
+        console.log(`\tAttribute: ${name}; `);
+      }
+    }
+  }
 }
 
 async function main() {

From 9d9c5a1a18c4375947617fc337722be25ef45fb9 Mon Sep 17 00:00:00 2001
From: Eric Schmidt
Date: Thu, 6 Feb 2020 13:05:39 -0800
Subject: [PATCH 6/7] Additional small changes

---
 samples/analyze.v1p3beta1.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/samples/analyze.v1p3beta1.js b/samples/analyze.v1p3beta1.js
index 476e50d0..e8292e7f 100644
--- a/samples/analyze.v1p3beta1.js
+++ b/samples/analyze.v1p3beta1.js
@@ -296,8 +296,8 @@ async function detectFacesGCS(gcsUri) {
         segment.endTimeOffset.nanos = 0;
       }
       console.log(
-        `\tStart: ${segment.startTimeOffset.seconds}` +
-          `.${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`
+        `\tStart: ${segment.startTimeOffset.seconds}.` +
+          `${(segment.startTimeOffset.nanos / 1e6).toFixed(0)}s`
       );
       console.log(
         `\tEnd: ${segment.endTimeOffset.seconds}.` +

From c96b1792c7aabd627b6ce84f9109a0a228588ebd Mon Sep 17 00:00:00 2001
From: Eric Schmidt
Date: Thu, 6 Feb 2020 13:20:02 -0800
Subject: [PATCH 7/7] Fixes more linter issues

---
 samples/analyze.v1p3beta1.js | 2 --
 1 file changed, 2 deletions(-)

diff --git a/samples/analyze.v1p3beta1.js b/samples/analyze.v1p3beta1.js
index e8292e7f..eab4352e 100644
--- a/samples/analyze.v1p3beta1.js
+++ b/samples/analyze.v1p3beta1.js
@@ -19,7 +19,6 @@ async function detectPerson(path) {
   // Imports the Google Cloud Video Intelligence library + Node's fs library
   const Video = require('@google-cloud/video-intelligence').v1p3beta1;
   const fs = require('fs');
-  const util = require('util');
   // Creates a client
   const video = new Video.VideoIntelligenceServiceClient();
 
@@ -178,7 +177,6 @@ async function detectFaces(path) {
   // Imports the Google Cloud Video Intelligence library + Node's fs library
   const Video = require('@google-cloud/video-intelligence').v1p3beta1;
   const fs = require('fs');
-  const util = require('util');
   // Creates a client
   const video = new Video.VideoIntelligenceServiceClient();
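
---

Note for reviewers (an addition to this write-up, not one of the patches): a quick
way to smoke-test the new samples once the series is applied. These are the same
invocations the .example() entries and the system test use; they assume the
checked-in samples/resources/googlework_short.mp4 video, working application
default credentials (GOOGLE_APPLICATION_CREDENTIALS), and, for the *-gcs
variants, access to the public cloud-samples-data bucket:

  cd samples
  node analyze.v1p3beta1.js video-person ./resources/googlework_short.mp4
  node analyze.v1p3beta1.js video-person-gcs gs://cloud-samples-data/video/googlework_short.mp4
  node analyze.v1p3beta1.js video-faces ./resources/googlework_short.mp4
  node analyze.v1p3beta1.js video-faces-gcs gs://cloud-samples-data/video/googlework_short.mp4

Each command blocks on operation.promise() until the Video Intelligence API
finishes annotating, then prints track segments and attributes (plus pose
landmarks for person detection); the system test greps that output for /Hair/
and /glasses/.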