Example AI Applications

A complete and diverse set of AI use cases written in JavaScript. Each example covers data integration, transformation, reasoning/learning, runtime configuration, and results handling.

NBA Super Star

NBA expert decision making on who was an NBA super star

Predict Customer Churn

Identify which customers are going to leave you

The Stock is on Fire!

Spot when a stock starts trending


Perfect Ski Vacation

Recommend personalized ski offerings in real time

Twitter Optimization

Find the perfect time to tweet, when my followers are active

Drug Designer Search

Find protein functional sites in an efficient manner


Predict Customer Churn

You are the customer relations manager of a company that offers a subscription music streaming service. You want to be able to predict which customers won’t renew their subscription during the 30 days after their subscription expires to incentivize them and prevent churn. You know that the behavior patterns of customers over time are significant for determining if a customer may churn and seek a solution that can accurately capture such patterns.

Gameplan

Task: Predict which of the currently subscribed customers are likely to churn based on historical data of both types of customers: those that eventually churned and those that didn’t.

Data Sources: A REST data provider that gives access to your users’ listening activity logs and subscription renewal/cancellation transactions. The historical data from which discriminative behavior patterns are learned is fetched only once. The recent data from which churn behavior is predicted is fetched continuously, once every week.

Search for Patterns where:

  1. Only the most recent 14-day activity of a specific user is considered.
  2. Churn behavior is learned from users who had a subscription ending in Feb. 2017 which was either renewed more than 30 days after the subscription ended or not renewed at all.
  3. Non-churn behavior is learned from users who had a subscription ending in Feb. 2017 which was renewed within 30 days.
  4. The patterns are discriminative, i.e. significantly more common among one type of users than the other.
Source Code
const bcToken = "my_braincast_token";
const bcSecretKey = "my_secret_api_key";
const bc = BC.createClient(bcToken, bcSecretKey);

const ChurnAdapterTemplate = {
    openAPISpec: Examples.OpenAPI.JSON.Churn, // the Open API Specification (OAS) of a REST data source for accessing the churn user data
    path:"/query",  // the HTTP endpoint to use from the OAS spec
    operation:"get",  // the HTTP method to use
    params:{     // required values for parameters specified in the OAS
        "apikey":     "AKIAIOSFODNNSLOE:Yxg83MZaEgh3OZ3l0rLo5RTX11o="
    }
};

//
// define and validate data sources
//
let transactionsStream = bc.Datastore.getStream("Churn.Example.transactions");
if (!transactionsStream) {
    transactionsStream = bc.Datastore.defineStream(
        "Churn.Example.transactions",//source name
        BC.Utils.SourceAdapters.createREST(ChurnAdapterTemplate, {"params": {"function": "transactions"}}),// Reuse adapter to fetch data for user transactions (i.e. subscriptions/cancellations)
        [// specify which fields of the records being fetched from the source should be used to make up the stream's output record (if omitted, all fields will be used)
            "transaction_date",         // day of transaction (formatted as YYYYMMDD)
            "msno",                     // unique user identifier
            "payment_plan_days",        // number of days in subscription period
            "membership_expire_date",   // day on which the current subscription expires
            "is_cancel",                 // 0 if the transaction represents a new/re-newed subscription or 1 if it represents a cancellation of subscription
            "is_auto_renew"              // 0 if the transaction represents a manual subscription renewal or 1 if it represents either an automatic subscription renewal or a cancellation of subscription
        ]
    );
    if (!transactionsStream || !transactionsStream.validate()) {
        throw "Access to Churn user transactions data feed does not work, reason: " + transactionsStream.validationErrorMsg;
    }
}
let userLogStream = bc.Datastore.getStream("Churn.Example.userLogs");
if (!userLogStream) {
    userLogStream = bc.Datastore.defineStream(
        "Churn.Example.userLogs",
        BC.Utils.SourceAdapters.createREST(ChurnAdapterTemplate, {"params": {"function": "user_logs"}}),// Reuse adapter to fetch the daily user logs describing listening activity
        [
            "date",         // day of activity (formatted as YYYYMMDD)
            "msno",         // unique user identifier
            "num_25",       // # of songs played less than 25% of the song length
            "num_50",       // # of songs played between 25% to 50% of the song length
            "num_75",       // # of songs played between 50% to 75% of of the song length
            "num_985",      // # of songs played between 75% to 98.5% of the song length
            "num_100",      // # of songs played over 98.5% of the song length
            "total_secs"    // total amount of seconds the user listened to on this day
        ]
    );
    if (!userLogStream || !userLogStream.validate()) {
        throw "Access to Churn user logs data feed does not work, reason: " + userLogStream.validationErrorMsg;
    }
}

// -----------   Learn to identify user churn behaviour that differs from behaviour of non-churning users:   -----------
//
// build data view to learn from
//
let viewOutput = { //Define the view's output:
    "outputRecordHeader": { // the structure of the output record where each property is the name of one of the record's attributes and the value is that attribute's description
        "date": "the day at which the record's data occurs",
        "userId": "the user's identifier",
        "totalHours": "the number of total listening hours rounded to the nearest integer",
        "totalSongs": "the total number of songs listened to",
        "isCancel": "true if there was a cancellation transaction on this day, otherwise false",
        "isAutoRenew": "true if there was an automatic subscription renewal transaction on this day, otherwise false",
        "paymentPlanDays": "number of days in the current subscription period",
        "currentSubscriptionEnd": "the day that the subscription on this day ends",
        "nextSubscriptionDate": "the day that the user renewed the subscription after the current subscription ended"
    },
    "outputRecordGenerator": function (inputRecordQueues) { // a function that specifies how data coming from the input sources is used to generate the pipe's output records whose structure must correspond to outputRecordHeader. This function will be invoked by the system after each input consumption step. inputRecordQueues is a set of queues, each holding the latest batch of consumed input data records for a specific input source (corresponding to that source's specified filter and in the order of consumption).

        let usersSubscriptionPeriods = findUsersSubscriptionPeriods(inputRecordQueues["transactions"]);
        // Merge the two input record queues into an output record queue while synchronizing according to the date and user id (i.e. user log and transaction records for the same user that occurred on the same day will be combined into a single record):
        let outputRecordsQueue = [];
        let transactions = inputRecordQueues["transactions"];
        let userLogs = inputRecordQueues["userLogs"];
        let i = 0, j = 0, date, userId, totalHours, totalSongs, isCancel, isAutoRenew, currentSubscriptionEnd;
        while (i < transactions.length && j < userLogs.length) {
            date =   userLogs[j]["date"];
            userId = userLogs[j]["msno"];
            currentSubscriptionEnd = getSubscriptionInfoForDate(usersSubscriptionPeriods[userId],date,"currentSubscriptionEnd");
            let validUserLog = isValidUserLog(userLogs[j],currentSubscriptionEnd); // renamed so the variable does not shadow the isValidUserLog helper function
            totalHours =  validUserLog ? userLogs[j]["total_secs"] / 3600 : undefined;
            totalSongs =  validUserLog ? getTotalSongs(userLogs[j]) : undefined;
            isCancel =    transactions[i]["is_cancel"];
            isAutoRenew = transactions[i]["is_auto_renew"];
            if (transactions[i]["msno"] === userLogs[j]["msno"] &&          // same user
                transactions[i]["transaction_date"] === userLogs[j]["date"])// same day
            {
                i++;
                j++;
            } else if (transactions[i]["transaction_date"] >= userLogs[j]["date"]) { // add an output record only for the current user log (with missing transaction related attribute values):
                if(validUserLog){
                    isCancel =    undefined;
                    isAutoRenew = undefined;
                }else{
                    date = undefined; // this is needed so we don't add an output record corresponding to invalid user log records
                }
                j++;
            } else {  // add an output record only for the current transaction (with missing user log related attribute values):
                date = transactions[i]["transaction_date"];
                userId = transactions[i]["msno"];
                currentSubscriptionEnd =  getSubscriptionInfoForDate(usersSubscriptionPeriods[userId],date,"currentSubscriptionEnd");
                totalHours =  undefined;
                totalSongs =  undefined;
                i++;
            }
            if(date){
                outputRecordsQueue.push({
                    "date":                   date,
                    "userId":                 userId,
                    "totalHours":             totalHours,
                    "totalSongs":             totalSongs,
                    "isCancel":               isCancel,
                    "isAutoRenew":            isAutoRenew,
                    "paymentPlanDays":        getSubscriptionInfoForDate(usersSubscriptionPeriods[userId],date,"paymentPlanDays"),
                    "currentSubscriptionEnd": currentSubscriptionEnd,
                    "nextSubscriptionDate":   getSubscriptionInfoForDate(usersSubscriptionPeriods[userId],date,"nextSubscriptionDate")
                });
            }
        }
        return outputRecordsQueue;
    }
};
let learningView = bc.Datastore.getView("Churn.Example.view.learning");
if (!learningView) {//Create a view of data between 1-10-2016 and 31-3-2017 to learn from:
    learningView = bc.Datastore.createView(
        {
            "inputSources": {// the sources of records inputted to the view
                "transactions": {
                    "source": transactionsStream,// the actual source
                    "filter": {"where": "transaction_date <= 20170331 AND transaction_date >= 20161001"}  // the filter for this source specifying that only transactions between 1-10-2016 and 31-3-2017 should be consumed
                },
                "userLogs": {
                    "source": userLogStream,
                    "filter": {"where": "date <= 20170331 AND date >= 20161001"}
                }
            },
            "rate": BC.Meta.Period.Once,  // the input consumption rate specifying that all available data from all the input sources should be consumed once so as to always learn from the same data
        },
        viewOutput
    );
    if (!learningView.validate()) {
        throw "Learning view does not work, reason: " + view.validationErrorMsg;
    }
    learningView.save("Churn.Example.view.learning");
}

//
// -----------   Predict which users are likely to churn based on what was learned by the learning scanner:   -----------
//

// Build data view of the currently subscribed users' data:
let predictionView = bc.Datastore.getView("Churn.Example.view.prediction");
if(!predictionView){
    predictionView = bc.Datastore.createView(
    {
        "inputSources": {// the sources of records inputted to the view
            "transactions": {
                "source": transactionsStream,// the actual source
                "filter": {"where": "DATE(transaction_date,'YYYYMMDD') >= CURDATE(-3)"}  // the filter for this source specifying that only transactions for the past 3 months should be consumed
            },
            "userLogs": {
                "source": userLogStream,
                "filter": {"where": "DATE(transaction_date,'YYYYMMDD') >= CURDATE(-3)"}
            }
        },
        "rate": BC.Meta.Period.Week,  // the input consumption rate specifying that all available data from all the input sources should be consumed once every week (or BC.Meta.Period.Once which is also the default)
    },
    viewOutput
    );
    if (!predictionView.validate()) {
        throw "Prediction view does not work, reason: " + view.validationErrorMsg;
    }
    predictionView.save("Churn.Example.view.prediction");
}

// Specify constraints that define which data instances in both learning and prediction views should be analyzed (i.e. to learn from and make predictions about)
// IMPORTANT NOTE: these constraints must be defined over attributes that are common to the records of both learning and prediction views!
let instanceConstraints = [
    BC.Meta.Functions.Constraints.newConstraint(
        "user14DaySequence", // constraint name
        function (curSequence) {
            return  curSequence.records.length === 14 &&
                    curSequence.records.every(rec=>rec["userId"]===curSequence.records[0]["userId"]);
        },
        "instance is a user's 14 day long sequence", // explanation
    ),
    BC.Meta.Functions.Constraints.newMaximalValueConstraint(
        "mostRecentSequence",
        function (curSequence) { // a function whose returned value will be used to select one of all possible sequences satisfying all other learning constraints (this is an aggregation constraint)
            let avgDate = 0;
            curSequence.records.forEach(rec=>avgDate+=rec["date"]);
            return  avgDate / curSequence.records.length;
        },
        "instance is the most recent sequence (with the latest average date of all records)"
    )
];

// Specify constraints that check whether or not an instance in the learning view represents a certain type:
let typeConstraints = [
    BC.Meta.Functions.Constraints.newConstraint(
        "churn", // constraint name
        function (curSequence) {
            let lastDayRec = curSequence.records[curSequence.records.length-1];
            return  lastDayRec["currentSubscriptionEnd"] >= 20170201 && lastDayRec["currentSubscriptionEnd"] <= 20170301 && // the last day is within a user's subscription period which ends in Feb. 2017
                (!lastDayRec["nextSubscriptionDate"] || // and this user did not have another subscription period after Feb. 2017
                    numOfDaysBetween(lastDayRec["nextSubscriptionDate"],lastDayRec["currentSubscriptionEnd"]) > 30); // or this user had another subscription period but it began more than 30 days after the previous subscription period ended
        },
        "An instance in the learning view is of type 'churn' if it corresponds to a user who had a subscription ending in Feb. 2017 which the user either renewed more than 30 days after the subscription ended or didn't renew at all" // explanation
    ),
    BC.Meta.Functions.Constraints.newConstraint(
        "not churn", // constraint name
        function (curSequence) {
            let lastDayRec = curSequence.records[curSequence.records.length-1];
            return  lastDayRec["currentSubscriptionEnd"] >= 20170201 && lastDayRec["currentSubscriptionEnd"] <= 20170301 && // the last day is within a user's subscription period which ends in Feb. 2017
                lastDayRec["nextSubscriptionDate"] && // and this user had another subscription period after Feb. 2017
                numOfDaysBetween(lastDayRec["nextSubscriptionDate"],lastDayRec["currentSubscriptionEnd"]) <= 30; // which began no more than 30 days after the previous subscription period ended
        },
        "An instance in the learning view is of type 'not churn' if it corresponds to a user who had a subscription ending in Feb. 2017 which the user renewed within 30 days" // explanation
    )
];

// Build a prediction scanner that can scan the behaviour of the currently subscribed users in the prediction view and predict which users are likely to churn:
let predictionScanner = bc.Datastore.getScanner("Churn.Example.churnPredictor");
if (!predictionScanner) {
    predictionScanner = bc.Datastore.createPredictionScanner(
        learningView, // the view from which the scanner will learn how to predict an instance's type
        predictionView, // the view containing the instances whose type needs to be predicted.
        instanceConstraints, // constraints that define what a data instance is in both learning and prediction views, i.e. must be defined over attributes that are common to the records of both learning and prediction views
        typeConstraints // constraints that define the different types of data instances (an instance may be of one, many or none of these types)
    );
    if (!predictionScanner.validate()) {
        throw "Scanner not configured properly, reason: " + predictionScanner.validationErrorMsg;
    }

    predictionScanner.settings = {
        "schedule": BC.Meta.Period.Week, // or BC.Meta.Period.Once which is the default
        "predict": "churn", // set the type of user behaviour to predict by specifying one of the type constraint's name
        "activation": BC.Utils.ActionAdapters.createJSCallback(function (result) {
            let predictedUserSequence = result.value;
            let probability = Number.parseFloat(result.probability) * 100;
            alert(`User: ${predictedUserSequence.records[0]["userId"]} is ${probability}% likely to churn because ${result.explanation}`);
        })
    };

    predictionScanner.save("Churn.Example.churnPredictor");
}

// start predicting churn users:
predictionScanner.predict();
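
// Illustrative alternative activation (assumption: a non-browser environment where alert() is unavailable).
// It uses only the createJSCallback adapter shown above, collecting and logging predictions instead of alerting:
const predictedChurners = [];
const churnLoggingActivation = BC.Utils.ActionAdapters.createJSCallback(function (result) {
    const userId = result.value.records[0]["userId"];
    predictedChurners.push({"userId": userId, "probability": result.probability});
    console.log("User " + userId + " is likely to churn (probability " + result.probability + "): " + result.explanation);
});
// To use it, set predictionScanner.settings.activation = churnLoggingActivation before saving the scanner.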

//
// helper functions
//
function findUsersSubscriptionPeriods(transactions) {
    // build a map from user id to that user's chronologically ordered subscription periods ({start, end, paymentPlanDays}); assumes transactions are consumed in chronological order
    let usersSubscriptionPeriods = {};
    for (let i = 0; i < transactions.length; i++) {
        let t = transactions[i];
        if (Number(t["is_cancel"]) === 1) continue; // a cancellation does not open a new subscription period
        if (!usersSubscriptionPeriods[t["msno"]]) usersSubscriptionPeriods[t["msno"]] = [];
        usersSubscriptionPeriods[t["msno"]].push({
            "start": t["transaction_date"],
            "end": t["membership_expire_date"],
            "paymentPlanDays": t["payment_plan_days"]
        });
    }
    return usersSubscriptionPeriods;
}
function getSubscriptionInfoForDate(subscriptionPeriods, date, infoToGet) {
    // return the requested attribute of the subscription period that contains the given date, or undefined if there is none
    for (let i = 0; subscriptionPeriods && i < subscriptionPeriods.length; i++) {
        if (date >= subscriptionPeriods[i]["start"] && date <= subscriptionPeriods[i]["end"]) {
            switch (infoToGet) {
                case "nextSubscriptionDate":
                    return subscriptionPeriods[i + 1] ? subscriptionPeriods[i + 1]["start"] : undefined;
                case "currentSubscriptionEnd":
                    return subscriptionPeriods[i]["end"];
                case "paymentPlanDays":
                    return subscriptionPeriods[i]["paymentPlanDays"];
            }
        }
    }
    return undefined;
}
function isValidUserLog(userLog, expirationDate) {
    return numOfDaysBetween(userLog["date"],expirationDate) < 0 && //verify that the user log record did not occur after the subscription expiration date (this is an illegal user log record)
            userLog["total_secs"] / 3600 <= 24;
}
function getTotalSongs(userLog) {
    return userLog["num_25"] + userLog["num_50"] + userLog["num_75"] + userLog["num_985"] + userLog["num_100"];
}
function getDateFromString(date1Str) {
    return new Date(Number.parseInt(date1Str.slice(0, 4)), Number.parseInt(date1Str.slice(4, 6)) - 1, Number.parseInt(date1Str.slice(6, 8))); // note: months are 0-indexed in the JS Date constructor
}
function numOfDaysBetween(date1Str, date2Str) {
    let oneDay = 24 * 60 * 60 * 1000; // hours*minutes*seconds*milliseconds
    let date1 = getDateFromString(date1Str);
    let date2 = getDateFromString(date2Str);
    return Math.round((date1.getTime() - date2.getTime()) / (oneDay));
}
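
// Quick illustrative sanity checks for the date helpers above (the dates are hypothetical):
console.assert(numOfDaysBetween("20170305", "20170228") === 5);  // March 5th 2017 is 5 days after February 28th 2017
console.assert(numOfDaysBetween("20170228", "20170305") === -5); // negative when the first date is the earlier one
console.assert(getDateFromString("20170101").getMonth() === 0);  // January (JS Date months are 0-indexed)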



Spot a Trending Stock in Real Time

You are the product manager of a successful stock news mobile app, and you want to add a smart alerting service so your customers can be notified immediately when a specific stock of interest trends upwards. This will dramatically increase your customer retention.

Gameplan

Task: Monitor the changes in a specific stock's price and volume, as well as the NASDAQ index price and volume, and search for unique behavioral patterns within 10-second time frames.

Data Sources: A REST data provider delivering stock price and volume updates every second.

Search for Patterns where:

  1. There is a sequence of at least two price changes in Apple’s stock that increase from one change to another.
  2. The trend occurs within a 10 second window.
  3. At every second of the trend:
    1. Apple’s stock trading volume is higher than the average of the last minute.
    2. Apple’s stock price change is 50% higher than the last minute average.
    3. NASDAQ index price change is 50% higher than the last minute average.
Source Code
const bcToken = "my_braincast_token";
const bcSecretKey = "my_secret_api_key";
const bc = BC.createClient(bcToken, bcSecretKey);

const alphaVantageAdapterTemplate = {
    openAPISpec: Examples.OpenAPI.JSON.Alphavantage.Intraday, // the Open API Specification (OAS) for our REST data source
    path: "/query",  // the HTTP endpoint to use from the OAS spec
    operation: "get",  // the HTTP method to use
    params: {     // required values for parameters specified in the OAS
        "function": "TIME_SERIES_DAILY_ADJUSTED",
        "interval": "1sec", //Get 1 second ticks
        "apikey": "AKIAIOSFODNNSLOE:Yxg83MZaEgh3OZ3l0rLo5RTX11o="
    }
};

//
// define and validate data sources
//
let aaplStream = bc.Datastore.getStream("Stocks.Example.AAPL");
if (!aaplStream) {
    aaplStream = bc.Datastore.defineStream(
        "Stocks.Example.AAPL",//source name
        BC.Utils.SourceAdapters.createREST(alphaVantageAdapterTemplate, {"params": {"symbol": "AAPL"}}),// Reuse adapter to fetch data for Apple stocks
        ["Timestamp", "Price", "Volume"] // specify which fields of the records being fetched from the source should be used to make up the stream's output record (if omitted, all fields will be used)
    );
    if (!aaplStream || !aaplStream.validate()) {
        throw "Access to AAPL stocks example feed does not work, reason: " + aaplStream.validationErrorMsg;
    }
}
let idxStream = bc.Datastore.getStream("Stocks.Example.QQQ");
if (!idxStream) {
    idxStream = bc.Datastore.defineStream(
        "Stocks.Example.QQQ", //source name
        BC.Utils.SourceAdapters.createREST(alphaVantageAdapterTemplate, {"params": {"symbol": "QQQ"}}),// Reuse adapter to fetch data for NASDAQ index
        ["Timestamp", "Price"] //fields from source to use in stream's output records
    );
    if (!idxStream || !idxStream.validate()) {
        throw "Access to QQQ stocks example feed does not work, reason: " + idxStream.validationErrorMsg;
    }
}

//
// build view
//
let view = bc.Datastore.getView("Stocks.Example.view");
if (!view) {
    //Create a view that outputs bulks of the last minute data combined from the streams, every minute:
    let lastMinuteFilter = {"where": "Timestamp > CURTIME() - 60000"};
    view = bc.Datastore.createView(
        {
            "inputSources": {// the sources of records inputted to the view (in other cases some of the input sources can be another view instead of a data source)
                "apple": {
                    "source": aaplStream,      // the actual source
                    "filter": lastMinuteFilter // the filter for this source specifying that only the apple stock data for the last minute should be consumed
                },
                "idx": {
                    "source": idxStream,
                    "filter": lastMinuteFilter
                }
            },
            "rate": BC.Meta.Period.Min,  // the input consumption rate specifying that all available data from all the input sources  should be consumed once every minute (or BC.Meta.Period.Once which is the default)
        },
        {
            "outputRecordHeader": { // the structure of the output record where each property is the name of one of the record's attribute and the value is that attribute's description
                "timestamp": "the timestamp at which the record's data occurs",
                "stockCng": "the difference between the current and previous price of the stock",
                "stockAvgCng": "the average of all stock change values occurring in the last minute",
                "ratioStockVol": "the the current stock volume divided by the average stock volume in the last minute",
                "idxCng": "the difference between the current and previous price of the index",
                "idxAvgCng": "the average of all index change values occurring in the last minute"
            },
            "outputRecordGenerator": function (inputRecordQueues) { // a function that specifies how data coming from the input sources is used to generate the view's output records whose structure must correspond to outputRecordHeader. This function will be invoked by the system after each input consumption step. inputRecordQueues is a set of queues, each holding the latest batch of consumed input data records for a specific input source (corresponding to that source's specified filter and in the order of consumption).
                let syncedRecordsQueue = [];
                let outputRecordsQueue = [];
                let applRecs = inputRecordQueues["apple"];
                let idxRecs = inputRecordQueues["idx"];
                let i = 0, j = 0, nOut = 0, stockAvgCng = 0, stockAvgVol = 0, idxAvgCng = 0, stockCng, idxCng;
                while (i < applRecs.length && j < idxRecs.length) {
                    if (applRecs[i].Timestamp === idxRecs[j].Timestamp) {
                        syncedRecordsQueue[nOut] = {
                            "timestamp": applRecs[i].Timestamp,
                            "stockPrice": applRecs[i].Price,
                            "stockVol": applRecs[i].Volume,
                            "idxPrice": idxRecs[j].Price
                        };
                        if (nOut > 0) {
                            stockCng = syncedRecordsQueue[nOut].stockPrice - syncedRecordsQueue[nOut - 1].stockPrice;
                            idxCng = syncedRecordsQueue[nOut].idxPrice - syncedRecordsQueue[nOut - 1].idxPrice;
                            if (nOut === 1) {
                                stockAvgCng = stockCng;
                                idxAvgCng = idxCng;
                                stockAvgVol = syncedRecordsQueue[nOut].stockVol;
                            } else {
                                stockAvgCng = (stockAvgCng * (nOut - 1) + stockCng) / nOut;
                                idxAvgCng = (idxAvgCng * (nOut - 1) + idxCng) / nOut;
                                stockAvgVol = (stockAvgVol * (nOut - 1) + syncedRecordsQueue[nOut].stockVol) / nOut; // keep the running averages in local variables (the output records do not carry stockAvgVol)
                            }
                            outputRecordsQueue[nOut - 1] = {
                                "timestamp": syncedRecordsQueue[nOut].timestamp,
                                "stockCng": stockCng,
                                "stockAvgCng": stockAvgCng,
                                "ratioStockVol": syncedRecordsQueue[nOut].stockVol / stockAvgVol,
                                "idxCng": idxCng,
                                "idxAvgCng": idxAvgCng
                            };
                        }
                        i++;
                        j++;
                        nOut++
                    } else if (applRecs[i].Timestamp > idxRecs[j].Timestamp) {
                        j++;
                    } else {
                        i++;
                    }
                }
                return outputRecordsQueue; //note that in this case, we throw away records from one source that don't sync with records from the other source.
            }
        }
    );

    if (!view.validate()) {
        throw "View does not work, reason: " + view.validationErrorMsg;
    }

    view.save("Stocks.Example.view", false); // second parameter is whether to override an existing view
}

//
// build scanner
//
if (!view.getScanner("mytrendingtheory")) { // the name of the scanner and the patterns inside the scanner are in the namespace of the view
    let scanner = view.createScanner();
    scanner.addConstraint("constraint10sec", // constraint name
        function (curSequence) {
            return curSequence.records[curSequence.records.length - 1].timestamp - curSequence.records[0].timestamp <= 10000;
        },
        "10 second record sets", // explanation
        true // pattern is active - to disable a pattern call getPattern("name").active(false);
    );
    scanner.addConstraint("constraintAtLeast3Records",
        function (curSequence) {
            return curSequence.records.length >= 3;
        },
        "record sets with at least 3 records",
        true
    );
    scanner.addConstraint("constraintGrowingVol",
        function (curSequence) {
            return curSequence.records.every(record => record.ratioStockVol > 1);
        },
        "stock volume should be higher than the average volume",
        true
    );
    scanner.addConstraint("constraintPriceHigher",
        function (curSequence) {
            return curSequence.records.every(record => record.stockCng > 1.5 * record.stockAvgCng);
        },
        "change in Apple's stock price for each record should be more than 50% above the average",
        true
    );
    scanner.addConstraint("constraintIndexRising",
        function (curSequence) {
            return curSequence.records.every(record => record.idxCng > 1.5 * record.idxAvgCng);
        },
        "change in nasdaq 100 index stock price for each record should be more than 50% above the average",
        true
    );
    scanner.addConstraint("constraintEndingBehavior",
        function (curSequence) {
            for (let i = 1; i < curSequence.records.length; i++) {
                if (!(curSequence.records[i].stockCng > curSequence.records[i - 1].stockCng))
                    return false;
            }
            return true;
        },
        "change in stock price is preferably higher than its value for the previous record",
        true
    );

    scanner.settings = {
        "schedule": BC.Meta.Period.Min, // or BC.Meta.Period.Once which is the default
        "activation": BC.Utils.ActionAdapters.createJSCallback(function (result) {
            alert("Found trend in apple's stock: " + result.value + "\nExplanation: " + result.explanation);
        }),
        "start": "Thu May 02 2019 11:48:05 GMT+0300 (Israel Daylight Time)", // when to start back-test
        "end": "Thu May 09 2019 11:48:05 GMT+0300 (Israel Daylight Time)", // finish back-test after a week
    };
    if (!scanner.validate()) {
        throw "Scanner not configured properly, reason: " + scanner.validationErrorMsg;
    }

    scanner.save("mytrendingtheory", true); // override if such exists
}

// run our scanner on the view
view.scan("mytrendingtheory");

Find the Perfect Ski Vacation

You run an online travel booking service, and you wish to increase customer engagement with an intelligent ski vacation advisor that finds the best week for a ski vacation while considering the customer’s availability, resort weather, ski slope availability, and the prices of flights and hotels.

Gameplan

Task: Find the best week for a ski vacation, where the best time to ski is when there is enough snow, the prices are low, and the calendar is free of meetings.

Data Sources: Customer’s calendar, weather, flight and hotel prices. All via REST data providers.

Search for Patterns where:  

  • A one week vacation.
  • All days on my calendar should be free of meetings.
  • Number of holiday days in a week should be at least three.
  • There should be at least 2 and up to 5 days of snow.
  • The average snow depth should be more than 2 meters.
  • The total cost of the hotel should be under $400, and the hotel can’t be one that the user explicitly asked to exclude.
  • The overall flight cost should be under $300.
Source Code
const bcToken = "my_braincast_token";
const bcSecretKey = "my_secret_api_key";
const bc = BC.createClient(bcToken, bcSecretKey);
const destination = "Aspen";
const source = "Tel-Aviv";
const hotelsToExclude = ["Leonardo"];

//
// define and validate data sources
//
let dataSourcesInfo = [ //an array of data source information objects we will use to access, filter and transform the required data:
    {
        "name": "Vacation.Example.Weather",//source name
        "fields": ["Date", "SnowDepth"],
        "openAPITemplate": Examples.Adapters.WeatherOpenAPITemplate,// An Open API Specification template for the weather API
        "params": {"location": destination},// reuse the Open API template by setting the desired location
        "filter": {"where": "Date > CURDATE() - 10"}// extract the weather data only for recent 10 years
    },
    {
        "name": "Vacation.Example.Calander",
        "fields": ["Date", "IsHoliday", "NumberOfEvents"],
        "openAPITemplate": Examples.Adapters.CalendarAPITemplate,
        "params": {"apikey": "AKIAIOSFODNNSLOE:Yxg83MZaEgh3OZ3l0rLo5RTX11o="},
        "filter": {"where": "Date > CURDATE() AND Date < CURDATE() + 1"}// extract one year look ahead data
    },
    {
        "name": "Vacation.Example.Flights",
        "fields": ["Date", "Source", "Destination", "Price"],
        "openAPITemplate": Examples.Adapters.FlightsOpenAPITemplate, // this API only returns data for unreserved seats and there can be multiple records for the same date
        "filter": {
            "where": "((Source = '" + source + "' AND " + "Destination = '" + destination + "') " +
                "OR (Source = '" + destination + "' AND " + "Destination = '" + source + "')) " +
                "AND Date > CURDATE() AND Date < CURDATE() + 1"
        }// extract one year lookahead data for all 2-way flights with available seats

    },
    {
        "name": "Vacation.Example.Hotels",
        "fields": ["Date", "Name", "Price"],
        "openAPITemplate": Examples.Adapters.HotelsAPITemplate, // this API only returns data for available rooms and there can be multiple records for the same date
        "params": {"destination": destination},
        "filter": {"where": "Date > CURDATE() AND Date < CURDATE() + 1"} // extract one year look ahead data for all available rooms in hotels at the destination
    }
];

//
// build view
//
let view = bc.Datastore.getView("Vacation.Example.IdealVacationView");
if (!view) {
    let inputSources = {};
    dataSourcesInfo.forEach(dataSourceInfo =>{
        let stream = bc.Datastore.getStream(dataSourceInfo.name);
        if (!stream) { // if the stream hasn't been previously defined
            stream = bc.Datastore.defineStream(
                dataSourceInfo.name,//source name
                BC.Utils.SourceAdapters.createREST(dataSourceInfo.openAPITemplate, // REST adapter to fetch data from REST API
                                                   {"params": dataSourceInfo.params}), // reuse the Open API template by adding or overriding the relevant parameter values
                dataSourceInfo.fields // specify which fields of the records being fetched from the source should be used to make up the stream's output record (if omitted, all fields will be used)
            );
            if (!stream || !stream.validate()) {
                throw "Access to " + dataSourceInfo.name + " feed does not work, reason: " + stream.validationErrorMsg;
            }
        }
        // Add this stream as an input source to a view required for syncing and transforming the data into view records:
        inputSources[dataSourceInfo.name] = {
            "source": stream,
            "filter": dataSourceInfo.filter  // the filter specifying which data should be consumed from the source
        };
    });
    // Create a view that consumes the data from all 4 streams on a daily basis, combines and transforms the data and outputs records that can be used by the view:
    view = bc.Datastore.createView(
        {
            "inputSources":inputSources,
            "rate":BC.Meta.Period.Day //or BC.Meta.Period.Once which is the default
        },
        {
            "outputRecordHeader": { // the structure of the output record where each property is the name of one of the record's attribute and the value is that attribute's description
                "dayOfYear":    "the day of the year represented by a number between 1 and 365",
                "depth":        "the average snow depth on the associated day over the past 10 year period",
                "hotelName":    "the name of a hotel at the destination",
                "hotelPrice":   "the price of an available room in hotelName on the associated day",
                "availability": "true if there are no calendar events on the associated day, otherwise false",
                "isHoliday":    "true if the associated day is a holiday, otherwise false",
                "flightPrice":  "the price of a flight to destination on the associated day (will be undefined if there are no available flights on this day)",
                "destination":  "the flight's destination (will be undefined if there are no available flights on this day)"
            },
            "outputRecordGenerator": function (inputRecordQueues) { // a function that specifies how data coming from the input sources is used to generate the view's output records whose structure must correspond to outputRecordHeader. This function will be invoked by the system after each input consumption step. inputRecordQueues is a set of queues, each holding the latest batch of consumed input data records for a specific input source (corresponding to that source's specified filter and in the order of consumption).
                let daysOfYear = {};
                dataSourcesInfo.forEach(dataSourceInfo =>{
                    let sourceName = dataSourceInfo["name"];
                    inputRecordQueues[sourceName].forEach(inputRecord => {
                        let dayOfYear = date2DayOfYear(inputRecord["Date"]);
                        if(!daysOfYear[dayOfYear]){
                            daysOfYear[dayOfYear]={"depths":[],"flights":[],"isHoliday":false,"availability":false,"hotels":[]};
                        }
                        switch(sourceName){
                            case "Vacation.Example.Weather":
                                daysOfYear[dayOfYear]["depths"].push(inputRecord["SnowDepth"]);
                                break;
                            case "Vacation.Example.Flights":
                                daysOfYear[dayOfYear]["flights"].push(inputRecord);
                                break;

                            case "Vacation.Example.Calendar":
                                daysOfYear[dayOfYear]["isHoliday"] = inputRecord["IsHoliday"]===1;
                                daysOfYear[dayOfYear]["availability"] = inputRecord["NumberOfEvents"] > 0;
                                break;
                            case "Vacation.Example.Hotels":
                                daysOfYear[dayOfYear]["hotels"].push(inputRecord);
                                break;
                        }
                    })
                });
                let outputRecordsQueue = [];
                for(let dayOfYear = 1; dayOfYear <= 365; dayOfYear++){ // add output records in ascending day of year order
                    if(daysOfYear[dayOfYear]) { // verify that data exists for this day
                        let depthForCurDay = average(daysOfYear[dayOfYear]["depths"]);
                        // generate an output record for all (flight,hotel) record pairs for this day:
                        let hotelsForCurDay = daysOfYear[dayOfYear].hotels;
                        let flightsForCurDay = daysOfYear[dayOfYear].flights;
                        for (let i = 0; i < hotelsForCurDay.length; i++) {
                            if(flightsForCurDay.length>0){
                                for (let j = 0; j < flightsForCurDay.length; j++) {
                                    outputRecordsQueue[outputRecordsQueue.length] = {
                                        "dayOfYear":    dayOfYear,
                                        "depth":        depthForCurDay,
                                        "hotelName":    hotelsForCurDay[i]["Name"],
                                        "hotelPrice":   hotelsForCurDay[i]["Price"],
                                        "availability": daysOfYear[dayOfYear]["availability"],
                                        "isHoliday":    daysOfYear[dayOfYear]["isHoliday"],
                                        "flightPrice":  flightsForCurDay[j]["Price"],
                                        "destination":  flightsForCurDay[j]["Destination"]
                                    };
                                }
                            }else{ // if there are no available flights on this day
                                outputRecordsQueue[outputRecordsQueue.length] = {
                                    "dayOfYear":    dayOfYear,
                                    "depth":        depthForCurDay,
                                    "hotelName":    hotelsForCurDay[i]["Name"],
                                    "hotelPrice":   hotelsForCurDay[i]["Price"],
                                    "availability": daysOfYear[dayOfYear]["availability"],
                                    "isHoliday":    daysOfYear[dayOfYear]["isHoliday"]
                                };
                            }
                        }
                    }
                }
                return outputRecordsQueue;
            }
        }
    );

    if (!view.validate()) {
        throw "View does not work, reason: "+view.validationErrorMsg;
    }
    view.save("Vacation.Example.IdealVacationView", false); // second parameter is whether to override an existing view
}

//
// build scanner
//
if (!view.getScanner("myIdealVacationTheory")) { // the name of the scanner and the patterns inside the scanner are in the namespace of the view
    let scanner = view.createScanner();
    scanner.addConstraint("constraint1Week", // constraint name
        function(curSequence) {
            let numOfDays = curSequence.records.length;
            for (let i = 1; i < numOfDays; i++) { // verify that all days are consecutive. This is necessary because consecutive records may correspond to the same day (corresponding to different hotels/flights) or to days that aren't consecutive (recall that the view will not possess a record for days on which there were no available hotel rooms)
                let numOfDaysBetweenRecords = curSequence.records[i].dayOfYear-curSequence.records[i-1].dayOfYear;
                if(numOfDaysBetweenRecords !== 1 &&
                   numOfDaysBetweenRecords !== -364) // unless curSequence.records[i] corresponds to January 1st and curSequence.records[i-1] corresponds to December 31
                {
                    return false;
                }
            }
            return  numOfDays === 7; // vacation must be seven days long
        },
        "sequences corresponding to 7 consecutive days", // explanation
        true // pattern is active - to disable a pattern call getPattern("name").active(false);
    );
    scanner.addConstraint("constraintCalendarAvailability",
        function(curSequence) {
            return  curSequence.records.every(record => record.availability);
        },
        "calender availability for all days in sequence",
        true
    );
    scanner.addConstraint("constraintAtLeast3Holidays",
        function(curSequence) {
            let numOfHolidays = 0;
            for (let i = 0; i < curSequence.records.length; i++) {
                if (curSequence.records[i].isHoliday)
                    numOfHolidays++;
            }
            return numOfHolidays >= 3;
        },
        "should be at least 3 days corresponding to holidays",
        true
    );
    scanner.addConstraint("constraintSnowfall",
        function(curSequence) {
            let numOfDaysWithSnowfall = curSequence.records.filter(record => record.depth > 0).length;
            return numOfDaysWithSnowfall >= 2 && numOfDaysWithSnowfall <= 5;
        },
        "number of days with snowfall should be at least 2 and at most 5",
        true
    );
    scanner.addConstraint("constraintAvgDepth",
        function(curSequence) {
            let avgDepth = 0;
            curSequence.records.forEach(record => avgDepth += record.depth);
            avgDepth /= curSequence.records.length;
            return avgDepth > 2;
        },
        "average depth for all days should be higher than 2 meters",
        true
    );
    scanner.addConstraint("constraintHotel",
        function(curSequence) {
            let hotelName = curSequence.records[0].hotelName;
            if(hotelsToExclude.includes(hotelName))return false;
            let hotelPriceForAllDays = 0;
            for (let i = 0; i < curSequence.records.length; i++) {
                let record = curSequence.records[i];
                if(record.hotelName !== hotelName){  // must be same hotel on all days
                    return false;
                }
                hotelPriceForAllDays += record.hotelPrice
            }
            return hotelPriceForAllDays < 400;
        },
        "same hotel, which can't be any of "+hotelsToExclude+", must be available for all days and the total price should be under 400$",
        true
    );
    scanner.addConstraint("constraintFlights",
        function(curSequence) {
            let firstDay = curSequence.records[0];
            let lastDay = curSequence.records[curSequence.records.length-1];
            return  firstDay.flightPrice && firstDay.destination === destination && // there must be an available flight to the destination on the first day
                    lastDay.flightPrice && lastDay.destination === source && // there must be an available flight back on the last day
                    firstDay.flightPrice + lastDay.flightPrice < 300;
        },
        "there must be a flight to the destination on the first day and a flight back to the source on the last day, such that the total trip cost is under 300$",
        true
    );

    scanner.settings = {
        "schedule" : BC.Meta.Period.Day, // or BC.Meta.Period.once which is the default
        "activation" : BC.Utils.ActionAdapters.createJSCallback(function(result) {
            alert("Found your ideal vacation: "+ result+"\nExplanation: "+result.explanation);
        }),
        "start" : "Thu May 02 2019 11:48:05 GMT+0300 (Israel Daylight Time)", // when to start back-test
        "end" : "Thu May 09 2019 11:48:05 GMT+0300 (Israel Daylight Time)", // finish back-test after a week
    };
    if (!scanner.validate()) {
        throw "Scanner not configured properly, reason: "+scanner.validationErrorMsg;
    }
    scanner.save("myIdealVacationTheory", true); // override if such exists
}

// run our scanner on the view
view.scan("myIdealVacationTheory");

//
// helper functions:
//
function date2DayOfYear(date) {
    return Math.round((date - new Date(date.getFullYear(), 0, 0)) / 86400000); // day of the year in the range [1,365]; rounding keeps the result correct for midnight dates even across DST changes
}
function average(array) {
    return array.reduce(function (sum, a) {
        return sum + a
    }, 0) / (array.length || 1);
}
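
// Quick illustrative checks of the helpers above (the values are hypothetical):
console.assert(date2DayOfYear(new Date(2019, 0, 1)) === 1);     // January 1st is day 1 of the year
console.assert(date2DayOfYear(new Date(2019, 11, 31)) === 365); // December 31st is day 365 in a non-leap year
console.assert(average([1, 2, 3]) === 2);
console.assert(average([]) === 0); // an empty array averages to 0 rather than NaN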


Send My Tweet at the Perfect Time!

You are the product manager of a social media management platform and wish to create an intelligent tweet auto-scheduling feature. You realize that if you add smart tweeting based on the level of daily activity of your customers’ followers, you can optimize tweet exposure and increase your customers’ engagement with their followers.

Gameplan

Task: Analyze the behavior of your followers over the past week and find the 14 best daily half-hour slots to tweet.

Data Sources: A Twitter REST data provider. The analysis runs every midnight.

Search for Patterns where:

  • Analyze 5-minute intervals within half-hour windows.
  • The number of active followers increases in at least 3 five-minute intervals.
  • The window must contain at least one five-minute interval in which the number of active followers is above 75% of the maximum during the past week.
  • The window must contain at least one five-minute interval in which the number of activities is above 75% of the maximum during the past week.
Source Code
const bcToken = "my_braincast_token";
const bcSecretKey = "my_secret_api_key";
const bc = BC.createClient(bcToken, bcSecretKey);
const followers = Examples.Twitter.myFollowers;

const twitterAdapterTemplate = {
	openAPISpec: Examples.OpenAPI.JSON.Twitter, // the Open API Specification (OAS) for our REST data source
	path:"/query",  // the HTTP endpoint to use from the OAS spec
	operation:"get",  // the HTTP method to use
	params:{     // required values for parameters specified in the OAS
		"function":   "statuses/user_timeline",
		"apikey":     "AKIAIOSFODNNSLOE:Yxg83MZaEgh3OZ3l0rLo5RTX11o="
	}
};

//
// define and validate data sources
//
let inputSources = {};
let today = new Date();
let pastWeekFilter = {"where":"created_at > " + (today.getTime() - 1000*60*60*24*7)}; // only consume tweets created during the past week
followers.forEach(follower => {
	let followerStream = bc.Datastore.getStream("Twitter.Example." + follower);
	if (!followerStream) {
		followerStream = bc.Datastore.defineStream(
			"Twitter.Example." + follower,//source name
			BC.Utils.SourceAdapters.createREST(twitterAdapterTemplate, {"params": {"user_id": follower}}),// Reuse adapter to fetch data for follower
			["user_id", "created_at"] // specify which fields of the records being fetched from the source should be used to make up the stream's output record (if omitted, all fields will be used)
		);
		if (!followerStream || !followerStream.validate()) {
			throw "Access to follower " + follower + "'s Twitter example feed does not work, reason: " + followerStream.validationErrorMsg;
		}
	}
	inputSources[follower] = {
		"source": followerStream, // the actual source
		"filter": pastWeekFilter  // the filter for this source specifying that only the data for the past week should be consumed
	};
});

let view = bc.Datastore.createView(
	{
		"inputSources": inputSources, // the sources of records inputted to the view
		"rate": BC.Meta.Period.Day,  // the input consumption rate specifying that all available data from all the input sources should be consumed once every day (or BC.Meta.Period.Once which is the default)
	}, // the input to the view, in this case a data source (can also be another view)
	{
		"outputRecordHeader": { // the structure of the output record where each property is the name of one of the record's attribute and the value is that attribute's description
			"FiveMinuteIntervalOfWeek": "a value in the range [1,500] representing a 5 minute interval of the analyzed 7 day period, where [day1 at 00:00 - day1 at 00:05] is represented by 1 and [day7 at 23:55 - day7 at 24:00] is represented by 500",
			"ActivitiesCount": "the activity level measured as the number of followers that were active during the associated FiveMinuteIntervalOfWeek",
			"ActiveFollowers": "the number of distinct followers that were active during the associated FiveMinuteIntervalOfWeek",
			"ActivitiesCountMax": "the maximum ActivitiesCount encountered during the analyzed 7 day period",
			"ActiveFollowersMax": "the maximum ActiveFollowers encountered during the analyzed 7 day period"
		},
		"outputRecordGenerator": function (inputRecordQueues) { // a function that specifies how data coming from the input sources is used to generate the view's output records whose structure must correspond to outputRecordHeader. This function will be invoked by the system after each input consumption step. inputRecordQueues is a set of queues, each holding the latest batch of consumed input data records for a specific input source (corresponding to that source's specified filter and in the order of consumption).
			let fiveMinIntervals = {};
			let activitiesCountMax = 0;
			let activeFollowersMax = 0;
			followers.forEach(follower => {
				inputRecordQueues[follower].forEach(record => {
					let interval = BC.Meta.Period.fiveMinIntervalOfWeek(record.created_at,today);// calculate the five minute interval of the week ending today which contains record.created_at
					if(!fiveMinIntervals[interval]){
						fiveMinIntervals[interval]={"ActiveFollowers":0,"ActivitiesCount":0};
					}
					if (!fiveMinIntervals[interval][record.user_id]) {
						fiveMinIntervals[interval][record.user_id] = 0;
						fiveMinIntervals[interval]["ActiveFollowers"]++;
						if (fiveMinIntervals[interval]["ActiveFollowers"] > activeFollowersMax) {
							activeFollowersMax = fiveMinIntervals[interval]["ActiveFollowers"];
						}
					}
					fiveMinIntervals[interval][record.user_id]++;
					fiveMinIntervals[interval]["ActivitiesCount"]++;
					if (fiveMinIntervals[interval]["ActivitiesCount"] > activitiesCountMax) {
						activitiesCountMax = fiveMinIntervals[interval]["ActivitiesCount"];
					}
				})
			});
			let outputRecordsQueue = [];
			for(let interval = 1; interval<=2016; interval++){ // add output records in ascending 5 min interval of the week order
				if(fiveMinIntervals[interval]) { // verify that data exists for this interval
					outputRecordsQueue[outputRecordsQueue.length] = {
						"FiveMinuteIntervalOfWeek": interval,
						"ActivitiesCount": fiveMinIntervals[interval].ActivitiesCount,
						"ActiveFollowers": fiveMinIntervals[interval].ActiveFollowers,
						"ActivitiesCountMax": fiveMinIntervals[interval].ActivitiesCountMax,
						"ActiveFollowersMax": fiveMinIntervals[interval].ActiveFollowersMax
					};
				}
			}
			return outputRecordsQueue;
		}
	}
);


if (!view.validate()) {
	throw "View does not work, reason: "+view.validationErrorMsg;
}

view.save("Twitter.Example.followersView", true); // second parameter is whether to override an existing view

//
// build scanner
//
if (!view.getScanner("myBestTimeToTweetTheory")) { // the name of the scanner and the patterns inside the scanner are in the namespace of the view
	let twitterActivityScanner = view.createScanner();
	twitterActivityScanner.addConstraint("constraint30min", // constraint name
		function(curSequence) {
			return  curSequence.records[curSequence.records.length-1].FiveMinuteIntervalOfWeek - curSequence.records[0].FiveMinuteIntervalOfWeek <= 5; // the first and last 5 minute intervals are at most 30 minutes apart
		},
		"30 minutes size sequences", // explanation
		true // pattern is active - to disable a pattern call getPattern("name").active(false);
	);
	twitterActivityScanner.addConstraint("constraintGrowingActiveFollowerCountWithAtLeast3Records",
		function(curSequence) {
			for (let i = 1; i < curSequence.records.length; i++) {
				if (curSequence.records[i].ActiveFollowers <= curSequence.records[i - 1].ActiveFollowers)
					return false;
			}
			return  curSequence.records.length >= 3;
		},
		"sequence should have at least 3 five minute intervals with an increasing number of active followers",
		true
	);
	twitterActivityScanner.addConstraint("constraintAtLeast1ActiveFollowerAbove75%Max",
		function(curSequence) {
			return curSequence.records.some(record => record.ActiveFollowers > 0.75 * record.ActiveFollowersMax);
		},
		"amount of active followers for at least one record is above 75% of maximum of the week",
		true
	);
	twitterActivityScanner.addConstraint("constraintAtLeast1ActivityCountAbove75%Max",
		function(curSequence) {
			return curSequence.records.some(record => record.ActivitiesCount > 0.75 * record.ActivitiesCountMax);
		},
		"amount of activities for at least one record is above 75% of maximum of the week",
		true
	);

	twitterActivityScanner.settings = {
		"schedule" : BC.Meta.Period.Day, // or BC.Meta.Period.Once which is the default
		"activation" : BC.Utils.ActionAdapters.createJSCallback(function(result) {
			alert("Found the best time to tweet: "+ result+"\nExplanation: "+result.explanation);
		}),
		"start" : "Thu May 02 2019 11:48:05 GMT+0300 (Israel Daylight Time)", // when to start back-test
		"end" : "Thu May 09 2019 11:48:05 GMT+0300 (Israel Daylight Time)", // finish back-test after a week
	};
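	// Illustrative alternative: rather than hard-coding the back-test window, a one-week window
	// ending now could be computed (assuming the settings accept any value whose string form
	// matches the date format above):
	//     let backTestEnd = new Date();
	//     let backTestStart = new Date(backTestEnd.getTime() - 7 * 24 * 60 * 60 * 1000);
	//     twitterActivityScanner.settings.start = backTestStart.toString();
	//     twitterActivityScanner.settings.end = backTestEnd.toString();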
	if (!twitterActivityScanner.validate()) {
		throw "Scanner not configured properly, reason: "+twitterActivityScanner.validationErrorMsg;
	}

	twitterActivityScanner.save("myBestTimeToTweetTheory", true); // override if such exists
}

// run our scanner on view
view.scan("myBestTimeToTweetTheory");

Drug Designers Delight

You head an R&D division in a large pharmaceutical company that specializes in drug design. Your researchers often waste precious time and resources pursuing potential anti-influenza NA antigens that fail to efficiently neutralize different subtypes of the target viral protein. You want to find functionally conserved sites across the different subtypes while considering additional factors such as solvent accessibility and steric interference.

Gameplan

Task: Find the characteristic functional site of the influenza NA protein family, i.e. a combination of amino acids that the family members share. Analyze the 3D structures and the amino acid sequences of influenza NA protein family members. Search for a combination of three amino acids that occurs in the exact same order in at least 90% of the proteins (with 10 subtype representatives, that means at least 9 of them) and whose members are spatially located within 4 angstroms of one another.

Data Sources: Protein Data Bank (PDB) records for each atom of every protein, containing the atom's 3D coordinates, the name of the amino acid it belongs to, that amino acid's position along the protein's sequence, the protein chain it belongs to, and the PDB identifier of the protein.
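
To make the record layout concrete, the following is a hypothetical single atom record as the source adapter would deliver it (the field names match those used in the source code below; the values are illustrative only):

// A hypothetical atom record from the PDB-derived source (illustrative values only):
const exampleAtomRecord = {
    "PdbID": "3B7E",      // PDB identifier of the protein this atom belongs to
    "Chain": "A",         // protein chain containing this atom
    "AAPosition": 118,    // position of the containing amino acid along the protein's sequence
    "AAName": "ARG",      // name of the containing amino acid
    "XCoord": 12.345,     // 3D coordinates of the atom, in angstroms
    "YCoord": -7.89,
    "ZCoord": 3.21
};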

Search for Patterns where:  

  • The pattern is a subsequence of 3 amino acids.
  • It occurs in at least 90% of all proteins.
  • All amino acids in an occurrence belong to the same protein.
  • The largest spatial distance between the centroids of any two amino acids in an occurrence (its diameter) is no more than 4 angstroms (a minimal distance-check sketch follows this list).
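
The diameter criterion above boils down to a pairwise Euclidean distance check between amino acid centroids. The following is a minimal sketch in plain JavaScript, independent of the BC API (the {x, y, z} centroid objects and the 4 angstrom threshold are assumptions taken from the criteria above):

// Minimal sketch: returns true if every pair of centroids lies within maxDiameter (in angstroms).
// centroids is assumed to be an array of {x, y, z} objects, one per amino acid.
function withinDiameter(centroids, maxDiameter) {
    for (let i = 0; i < centroids.length; i++) {
        for (let j = i + 1; j < centroids.length; j++) {
            const dx = centroids[j].x - centroids[i].x;
            const dy = centroids[j].y - centroids[i].y;
            const dz = centroids[j].z - centroids[i].z;
            if (Math.sqrt(dx*dx + dy*dy + dz*dz) > maxDiameter) {
                return false;
            }
        }
    }
    return true;
}

// Example: a triad whose centroids all lie within 4 angstroms of one another.
withinDiameter([{x:0,y:0,z:0}, {x:1.5,y:1.0,z:0.5}, {x:2.0,y:2.0,z:1.0}], 4); // true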
Source Code
const token = "my_braincast_token";
const secretKey = "my_secret_api_key";
const bc = BC.createClient(token, secretKey);
// A list of PDB ids for the proteins that represent the 10 different influenza NA subtypes:
const proteinFamilyFromUserRequest = ["3B7E","2AEP","4HZV","2HTV","3SAL","4QN4","4QN3","4WA3","4MWJ","4GDJ"];

//
// define and validate data sources
//
let stream = bc.Datastore.getStream("Influenza.Example.proteinDataset");
if (!stream) {
    stream = bc.Datastore.defineStream(
        "Influenza.Example.proteinDataset",//source name
        BC.Utils.SourceAdapters.createS3Adapter( // adapter for fetching a CSV file containing amino acid (AA) sequence and 3D info for many NA proteins, from AWS S3
            "AWS AKIAIOSFODNN7EXAMPLE:bWq2s1WEIj+Ydj0vQ697zp+IXMU=", //authorization string
            "proteins", // S3 bucket
            "NA.csv", // S3 object
            "CSV" // object type
        ),
        ["PdbID","Chain","AAPosition","AAName","XCoord","YCoord","ZCoord"] //fields from source to use in stream's output records
    );
    if (!stream || !stream.validate()) {
        throw "Cannot access data source"+(stream ? ", reason: "+stream.validationErrorMsg : "");
    }
}

//
// build view
//
let view = bc.Datastore.getView("Influenza.Example.proteinsView"); //This allows us to reuse a previously saved view. Note however, that if we want to change the content of proteinFamilyFromUserRequest, e.g. per user request, we will need to create a new view
if (!view) {
    //Create a view that outputs all amino acid records for the specified set of proteins in the order of the proteins' sequences:
    view = bc.Datastore.createView(
        {
            "inputSources": {// the sources of records inputted to the view (in other cases some of the input sources can be another view instead of a data source)
                "proteins": {
                    "source": stream,
                    "filter": {"where":"PdbID IN ("+proteinFamilyFromUserRequest.join(',')+") AND Chain = 'A'"}, // the filter for this source specifying that only the data for proteins in the specified set should be consumed
                    "order":["AAPosition"] // output ordering - order the amino acid records according to their order within their associated protein's sequence
                }
            } // the input source will only be consumed once since this is the default when no consumption rate is specified
        },
        {
            "outputRecordHeader": { // the structure of the output record where each property is the name of one of the record's attribute and the value is that attribute's description
                "PdbID":"the identifier of the protein that the amino acid represented by this record is associated with",
                "AAPosition":"the position of the amino acid within its associated protein",
                "AAName":"the name of the amino acid",
                "XCent":"the x-coordinate of the the amino acid's centroid atom",
                "YCent":"the y-coordinate of the the amino acid's centroid atom",
                "ZCent":"the z-coordinate of the the amino acid's centroid atom"
            },
            "outputRecordGenerator": function (inputRecordQueues) { // a function that specifies how data coming from the input sources is used to generate the view's output records whose structure must correspond to outputRecordHeader. This function will be invoked by the system after each input consumption step. inputRecordQueues is a set of queues, each holding the latest batch of consumed input data records for a specific input source (corresponding to that source's specified filter and in the order of consumption).
                let proteins = {};
                inputRecordQueues["proteins"].forEach(record => {
                    if (!proteins[record["PdbID"]]) {
                        proteins[record["PdbID"]] = {};
                    }
                    if (!proteins[record["PdbID"]][record["Chain"]]) {
                        proteins[record["PdbID"]][record["Chain"]] = {};
                    }
                    if(!proteins[record["PdbID"]][record["Chain"]][record["AAPosition"]]){
                        proteins[record["PdbID"]][record["Chain"]][record["AAPosition"]]={"numOfAtoms":0,"XCent":0,"YCent":0,"ZCent":0};
                    }
                    proteins[record["PdbID"]][record["Chain"]][record["AAPosition"]].numOfAtoms++;
                    proteins[record["PdbID"]][record["Chain"]][record["AAPosition"]].XCent+=record["XCoord"];
                    proteins[record["PdbID"]][record["Chain"]][record["AAPosition"]].YCent+=record["YCoord"];
                    proteins[record["PdbID"]][record["Chain"]][record["AAPosition"]].ZCent+=record["ZCoord"];
                });
                let outputRecordsQueue = [];
                inputRecordQueues["proteins"].forEach(record => {
                    let aminoAcidAtomsData = proteins[record["PdbID"]][record["Chain"]][record["AAPosition"]];
                    outputRecordsQueue[outputRecordsQueue.length] = {
                        "PdbID": record["PdbID"],
                        "AAPosition": record["AAPosition"],
                        "AAName": record["AAName"],
                        "XCent": aminoAcidAtomsData.XCent/aminoAcidAtomsData.numOfAtoms,
                        "YCent": aminoAcidAtomsData.YCent/aminoAcidAtomsData.numOfAtoms,
                        "ZCent": aminoAcidAtomsData.ZCent/aminoAcidAtomsData.numOfAtoms
                    };
                });
                return outputRecordsQueue;
            }
        }
    );

    if (!view.validate()) {
        throw "View does not work, reason: "+view.validationErrorMsg;
    }
    view.save("Influenza.Example.proteinsView", false); // second parameter is whether to override an existing view
}

//
// build scanner
//
if (!view.getScanner("myFunctionalSiteDetectionTheory")) { // the name of the scanner and the patterns inside the scanner are in the namespace of the view
    let scanner = view.createScanner();
    scanner.addConstraint("constraintAminoAcidTriads", // constraint name
        function(curSequence) {
            return  curSequence.records.length === 3 &&
                    curSequence.records.every(record => record.PdbID === curSequence.records[0].PdbID);
        },
        "must be a sequence of three amino acids (triads) within a protein", // explanation
        true // pattern is active - to disable a pattern call getPattern("name").active(false);
    );
    scanner.addConstraint("constraintFunctionalSiteDiameter",
        function(curSequence) {
            for (let i = 0; i < curSequence.records.length; i++) {
                for (let j = i + 1; j < curSequence.records.length; j++) {
                    let aa1 = curSequence.records[i];
                    let aa2 = curSequence.records[j];
                    // Calculate the 3D distance between the current two AA in this sequence:
                    let distance = Math.sqrt(
                        Math.pow(aa2.XCent-aa1.XCent,2)+
                        Math.pow(aa2.YCent-aa1.YCent,2)+
                        Math.pow(aa2.ZCent-aa1.ZCent,2));
                    if (distance > 4) { // PDB coordinates are given in angstroms, so the 4 angstrom threshold is simply 4
                        return false;
                    }
                }
            }
            return true;
        },
        "all amino acids must be within a 4 angstrom spatial diameter",
        true
    );
    scanner.addConstraint("constraintMinimumSupport",
        function(curSequence) {
            // Partition view into record sequences, one for each unique PdbID, i.e. one for each protein:
            let proteins = BC.Meta.Functions.Views.createPartition(
                view, // view associated with the scanner
                ["PdbID"] // the attributes by which to partition the view (i.e. all records with the same PdbID will be assigned to the same sequence).
            );
            // Use the support function to count the number of protein sequences whose AAName subsequences contain the current sequence's AAName subsequence (at least once):
            return BC.Meta.Functions.Sequences.support(
                curSequence,
                proteins,
                ["AAName"] // the attributes to consider when checking for containment
            ) >= 0.9 * proteins.length;
        },
        "should occur in at least 90% of the proteins",
        true
    );

    if (!scanner.validate()) {
        throw "Scanner not configured properly, reason: "+scanner.validationErrorMsg;
    }

    scanner.save("myFunctionalSiteDetectionTheory", true); // override if such exists
}

// run our scanner on the view
const result = view.scan("myFunctionalSiteDetectionTheory");
alert("Found the following functional site in NA family: "+ result+"\nExplanation: "+result.explanation);