Merge pull request #1 from patelneel55/backend_cloud_functions
Setup backend frameworks for Memoree
patelneel55 authored Nov 13, 2020
2 parents 5dd2e5f + 28357e9 commit b8d6976
Showing 14 changed files with 4,540 additions and 1 deletion.
8 changes: 7 additions & 1 deletion README.md
@@ -1 +1,7 @@
# Memoree

AI Video Library

Assumptions:
- Two files with the same name but different video extensions have the same content
- The data folder doesn't change once the migration process begins
5 changes: 5 additions & 0 deletions backend/.firebaserc
@@ -0,0 +1,5 @@
{
  "projects": {
    "default": "iron-flash-284000"
  }
}
69 changes: 69 additions & 0 deletions backend/.gitignore
@@ -0,0 +1,69 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
firebase-debug.log*

# Firebase cache
.firebase/

# Firebase config

# Uncomment this if you'd like others to create their own Firebase project.
# For a team working on the same Firebase project(s), it is recommended to leave
# it commented so all members can deploy to the same project(s) in .firebaserc.
# .firebaserc

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# nyc test coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
secrets.json
uploaded_files.json

env/
55 changes: 55 additions & 0 deletions backend/env-parser.js
@@ -0,0 +1,55 @@
// Reads a JSON document from stdin, flattens it into dotted key="value" pairs,
// and passes them to `firebase functions:config:set`.
const stdin = process.openStdin()
const spawn = require('child_process').spawn

function read() {
    return new Promise((resolve) => {
        stdin.addListener("data", function (d) {
            resolve(d.toString().trim())
        });
    });
}

function isObj(x) {
    return x !== null && typeof x === 'object'
}

// Recursively flatten a JSON tree into ['a.b="value"', ...] entries.
function parse(tree) {
    const values = []
    const properties = Object.keys(tree)
    properties.forEach(prop => {
        if (isObj(tree[prop])) {
            const children = parse(tree[prop])
            children.forEach(child => {
                const value = prop + "." + child
                values.push(value)
            })
        } else {
            const value = prop + "=" + "\"" + tree[prop] + "\""
            values.push(value)
        }
    })
    return values
}

function runFirebaseConfigSet(properties) {
    return new Promise((resolve) => {
        const args = ["functions:config:set"].concat(properties)
        const cmd = spawn("firebase", args, { shell: true })
        cmd.stdout.setEncoding('utf8')
        cmd.stdout.on('data', data => { console.log(data) })
        cmd.stderr.on('data', data => { console.error("Error:", data.toString()) })
        cmd.on('close', code => {
            console.log(`Exit code: ${code}`)
            resolve(code)
        })
    })
}

read()
    .then(input => {
        const json = JSON.parse(input)
        const properties = parse(json)
        console.log("Found properties:\n", properties.map(it => "\t▷ " + it).join("\n"))
        return properties
    })
    .then((properties) => runFirebaseConfigSet(properties))
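
For illustration, here is a minimal standalone sketch of the flattening that parse() performs above, so the input/output shape is easy to see. The flatten helper and the input object below are hypothetical and exist only for this example; they are not part of the commit.

// Self-contained re-implementation of the same flattening, for illustration only.
// The input object is made up; parse() above produces the same style of output.
function flatten(tree, prefix = "") {
    return Object.entries(tree).flatMap(([key, value]) =>
        value !== null && typeof value === "object"
            ? flatten(value, prefix + key + ".")
            : [`${prefix}${key}="${value}"`]
    );
}

console.log(flatten({ service: { region: "us-central1", retries: 3 } }));
// [ 'service.region="us-central1"', 'service.retries="3"' ]
// These entries become the arguments passed to `firebase functions:config:set`.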
5 changes: 5 additions & 0 deletions backend/firebase.json
@@ -0,0 +1,5 @@
{
  "functions": {
    "predeploy": []
  }
}
1 change: 1 addition & 0 deletions backend/functions/.gitignore
@@ -0,0 +1 @@
node_modules/
52 changes: 52 additions & 0 deletions backend/functions/algolia.js
@@ -0,0 +1,52 @@
/*
* Copyright (c) 2020 Neel Patel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

const algoliasearch = require('algoliasearch');
const encode = require('hashcode').hashCode;

exports.save = (records, appId, apiKey, targetIndex) => {
    function _generateObjectID(obj) {
        return Math.abs(
            encode().value(
                obj.file_name +
                obj.subheader +
                obj.confidence +
                (obj.text || obj.entity || obj.transcript)
            )
        )
    }

    const algoliaClient = algoliasearch(appId, apiKey)
    const index = algoliaClient.initIndex(targetIndex)

    // Add object id to records
    records = records.map((obj) => {
        return {
            ...obj,
            objectID: _generateObjectID(obj)
        }
    })

    // Save records to Algolia
    index.saveObjects(records)
}
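
A minimal usage sketch for the save helper above, assuming it is required from backend/functions/algolia.js. The credentials, index name, and record values are placeholders; only the field names mirror what _generateObjectID reads.

// Hypothetical caller; all credentials and values below are placeholders.
const algolia = require('./algolia.js');

const records = [
    {
        file_name: "birthday_2019.mp4",
        subheader: "text_annotations",
        confidence: 0.92,
        text: "Happy Birthday",
    },
];

// Each record is assigned a deterministic objectID before being written to the index.
algolia.save(records, "ALGOLIA_APP_ID", "ALGOLIA_ADMIN_KEY", "memoree_search");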
157 changes: 157 additions & 0 deletions backend/functions/index.js
@@ -0,0 +1,157 @@
/*
* Copyright (c) 2020 Neel Patel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

const functions = require('firebase-functions');
require("firebase-functions/lib/logger/compat"); // This is to properly structure console output with Node v10 in Firebase
const admin = require('firebase-admin');
const videoIntel = require('@google-cloud/video-intelligence');
const fs = require('fs');
const path = require('path');
const os = require('os');

const utils = require('./utils.js');
const algolia = require('./algolia.js');
const typesense = require('./typesense.js');

admin.initializeApp();


/**
* Uses the GCloud Video Intelligence API to analyze
* video added to the GCloud Storage Bucket
*
* @param {*} bucketObject The video file added to GCloud storage
*/
async function runVideoAnalyzer(bucketObject) {

    // Derive the annotation output name from the video's base name and extension,
    // e.g. "trip.mp4" -> "trip_mp4.json"
    const [baseName, extension] = bucketObject.name.split(/(?:\.([^.]+))?$/)
    const jsonPath = `${baseName}_${extension}.json`

    console.log(
        "Input URI: ", `gs://${bucketObject.bucket}/${bucketObject.name}\n`,
        "Output URI: ", `gs://${functions.config().memoree.json_bucket}/${jsonPath}`
    )

    const request = {
        inputUri: `gs://${bucketObject.bucket}/${bucketObject.name}`,
        outputUri: `gs://${functions.config().memoree.json_bucket}/${jsonPath}`,
        features: [
            "LABEL_DETECTION",
            "SHOT_CHANGE_DETECTION",
            "EXPLICIT_CONTENT_DETECTION",
            "FACE_DETECTION",
            "SPEECH_TRANSCRIPTION",
            "TEXT_DETECTION",
            "OBJECT_TRACKING",
            "LOGO_RECOGNITION",
            "PERSON_DETECTION"
        ],
        videoContext: {
            speechTranscriptionConfig: {
                languageCode: "en-US",
                enableAutomaticPunctuation: true
            },
            faceDetectionConfig: {
                includeBoundingBoxes: true,
                includeAttributes: true,
            },
            personDetectionConfig: {
                includeBoundingBoxes: true,
                includePoseLandmarks: true,
                includeAttributes: true
            }
        }
    }

    const videoClient = new videoIntel.v1p3beta1.VideoIntelligenceServiceClient()

    const [operation] = await videoClient.annotateVideo(request);
    console.log("Video annotation initiated: ", operation)

}

async function addSearchRecords(bucketObject) {
    const [baseName, extension] = bucketObject.name.split(/(?:\.([^.]+))?$/)
    const tempFilePath = path.join(os.tmpdir(), `${baseName}_${extension}.json`);

    console.log("Adding video records to TypeSense: ", bucketObject.name)
    fs.mkdirSync(path.dirname(tempFilePath), {recursive: true})
    await admin
        .storage()
        .bucket(bucketObject.bucket)
        .file(bucketObject.name)
        .download({destination: tempFilePath})

    const json = JSON.parse(fs.readFileSync(tempFilePath));

    const parseFunc = [
        utils.segment_label_annotations,
        utils.shot_label_annotations,
        utils.object_annotations,
        utils.logo_annotations,
        utils.text_annotations,
        utils.face_annotations,
        utils.speech_annotations
    ]

    // Upload parsed data to search database using helper functions
    parseFunc.forEach((func) => {
        typesense.save(
            func(json.annotation_results),
            functions.config().memoree.search_host,
            functions.config().memoree.search_port,
            functions.config().memoree.search_apikey,
            functions.config().memoree.search_index
        )
    })
}


// The following functions are triggered when a new entity is added or
// modified in Google Cloud Storage

const runtimeOpts = {
    timeoutSeconds: 60,
    memory: '2GB'
}

exports.helloWorld = functions.https.onRequest((request, response) => {
    response.send("Hello from Firebase!");
});

exports.analyzeVideo = functions
    .runWith(runtimeOpts)
    .storage
    .bucket(functions.config().memoree.video_bucket)
    .object()
    .onFinalize(async (object) => {
        await runVideoAnalyzer(object);
    })

exports.processJson = functions
    .runWith(runtimeOpts)
    .storage
    .bucket(functions.config().memoree.json_bucket)
    .object()
    .onFinalize(async (object) => {
        await addSearchRecords(object)
    })
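
For context, a sketch of the runtime configuration these functions expect under functions.config().memoree. The key names are the ones read in index.js; every value shown is an illustrative placeholder, and this is the kind of nested JSON that env-parser.js would flatten and set.

// Illustrative only: key names come from the functions.config() calls above,
// values are placeholders that would normally be set via env-parser.js.
const exampleMemoreeConfig = {
    memoree: {
        video_bucket: "my-video-bucket",       // watched by analyzeVideo
        json_bucket: "my-annotation-bucket",   // Video Intelligence output, watched by processJson
        search_host: "search.example.com",     // TypeSense connection details used when saving records
        search_port: "443",
        search_apikey: "SEARCH_API_KEY",
        search_index: "memoree_records"
    }
};

console.log(JSON.stringify(exampleMemoreeConfig, null, 2));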
