NodeJS sample

A complete sample app written in NodeJS to access OSAIS services

1/ Authenticate into OSAIS as a Client App

Your application first needs to authenticate into OSAIS. The returned authToken is valid for 3 days, but we recommend re-authenticating into OSAIS every 24 hours.

const axios = require('axios');

const async_login = async function (){
    try {
        let response = await axios.post("https://opensourceais.com/api/v1/public/client/login", {
            token: <your_token>,
            secret: <your_secret>
        }, {
            headers: {
                'Content-Type': 'application/json'
            }
        });
        if(response && response.data) {
            // response.data contains the authentication token as {authToken: ...}
            // use the authToken as Bearer token in the next calls
            return response.data;             
        }

        throw {
            data: null,
            status: 400,
            statusText: "Could not authenticate as a client"
        }
    }
    catch (err) {
        throw err;
    }
}
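
As a usage sketch, the authToken returned by this call can be cached and refreshed periodically; the gAuthToken and REFRESH_MS names below are our own choice for illustration, not part of the OSAIS API.

// usage sketch: cache the authToken and refresh it every 24 hours
// (gAuthToken and REFRESH_MS are illustrative names, not part of the OSAIS API)
let gAuthToken = null;
const REFRESH_MS = 24 * 60 * 60 * 1000;      // 24 hours

const async_refreshLogin = async function () {
    let dataLogin = await async_login();
    gAuthToken = dataLogin.authToken;        // reused as Bearer token in later calls
}

async_refreshLogin();
setInterval(async_refreshLogin, REFRESH_MS);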

2/ Implement routes for receiving notifications from OSAIS

Your application needs to implement two routes for receiving notifications from the AI: one route for the stage notifications, and one for the file upload notification and processing.


// this route will receive all notifications
router.post("/notify", function(req, res, next) {
    routeUtils.apiPost(req, res, myClient.async_testClientNotify.bind(myClient), {
        uid: req.body.uid? parseInt(req.body.uid): null,
        stage: req.body.stage? parseInt(req.body.stage): 0,
        token: req.body.token? req.body.token: "0",
        cycle: req.body.cycle? parseInt(req.body.cycle) : 0,
        engine: req.body.engine? req.body.engine : null,
        username: req.body.username? req.body.username : null,
        filename: req.body.filename? req.body.filename : null,
        descr: req.body.descr
    });
});

// this route will receive the file from the AI
router.post("/upload",  imgServices.uploadImg, function(req, res, next) {
    routeUtils.apiPost(req, res, myClient.async_onUploadImage.bind(testClient), {
        uid: req.body.uid? parseInt(req.body.uid): null,
        token: req.body.token? req.body.token: "0",
        cycle: req.body.cycle? parseInt(req.body.cycle) : 0,
        filename: req.ai? req.ai.filename: null,
        engine: req.body.engine? req.body.engine : null,
    });
});
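
Note that routeUtils.apiPost is just a small wrapper used by this sample to call the bound handler and send its result back as JSON; it is not part of OSAIS. If you do not have an equivalent helper, a minimal sketch could look like this (the error format here is an assumption of the sample, not an OSAIS requirement):

// minimal sketch of the routeUtils.apiPost wrapper used above: call the async
// handler with the extracted parameters and return its result as JSON
// (sample plumbing only, not an OSAIS API)
const apiPost = function (req, res, fnAsyncHandler, objParam) {
    fnAsyncHandler(objParam)
        .then(function (objRes) {
            res.status(200).json(objRes);
        })
        .catch(function (err) {
            res.status(err && err.status ? err.status : 400).json({
                data: null,
                statusText: err && err.statusText ? err.statusText : "unexpected error"
            });
        });
}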

3/ Send an AI request to OSAIS

Your application can call AIs by sending a POST request to OSAIS. Each AI has its own route into OSAIS, which is simply api/v1/private/client/ai/<name_of_the_ai>. Below are two examples: one for AI Ping, and one for AI Stable Diffusion.

const async_aiPing = async function (objParam, authToken) {
    try {
        let response = await axios.post"https://opensourceais.com/api/v1/private/client/ai/ping", {
            url_upload: objParam.url_upload,           // the input image
            width: 512,
            height: 512
        }, {
            headers: {
                Authorization: "Bearer "+ authToken,
                'Content-Type': 'application/json'
            }
        });

        if(response && response.data) {
            return response.data; 
        }

        throw {
            data: null,
            status: 400,
            statusText: "could not call AI Ping"
        };                
    }
    catch(err){
        throw err;
    }
}

const async_aiStableDiffusion = async function (objParam, authToken) {
    try {
        let response = await axios.post"https://opensourceais.com/api/v1/private/client/ai/diffusion", {

            url_upload: objParam.url_upload,           // did we request an image upload?
            word: "a zombie in london",                // the text input
            width: 512,
            height: 512,
            strength: 0.5,
            seed: 1,
            steps: 20,
        }, {
            headers: {
                Authorization: "Bearer "+ authToken,
                'Content-Type': 'application/json'
            }
        });

        if(response && response.data) {
            return response.data; 
        }

        throw {
            data: null,
            status: 400,
            statusText: "could not call AI Stable Diffusion"
        };                
    }
    catch(err){
        throw err;
    }
}
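
As an example of chaining these calls, your app can authenticate first and then pass the returned authToken to the AI call; the url_upload value below is a placeholder to be replaced with the URL relevant to your setup.

// usage sketch: authenticate, then send a request to AI Ping with the authToken
const async_runPing = async function () {
    let dataLogin = await async_login();
    let dataPing = await async_aiPing({
        url_upload: "<your_url_upload>"          // placeholder, see the comments above
    }, dataLogin.authToken);

    // the AI request is asynchronous: progress and results arrive on /notify and /upload
    console.log("AI Ping request accepted: ", dataPing);
}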

4/ Understand what to process on the Notification calls

Notifications are received on the /notify route. Here are the various possible stages that can be received. Note that images can still be received after the AI has stopped processing. It is also possible that several images are received, one per notification, if the AI was requested to output multiple files.


AI_PROGRESS_ERROR: -1,           // we stopped the request in error
AI_PROGRESS_REQSENT: 0,          // the request got sent to a specific AI (not yet acknowledged)
AI_PROGRESS_REQRECEIVED: 1,      // the request was received and acknowledged by the AI, but job not yet started
AI_PROGRESS_START_AI: 2,         // the AI started processing the request
AI_PROGRESS_INIT_IMAGE: 3,       // the AI starts processing the file (image, or other)
AI_PROGRESS_DONE_IMAGE: 4,       // the AI finished processing the file 
AI_PROGRESS_STOP_AI: 5,          // the AI has finished processing the request
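
As an illustration, here is a minimal sketch of what the async_testClientNotify handler referenced in the /notify route could look like: it switches on the stage value and returns the acknowledgement described further below. How you react to each stage is entirely up to your application; the console.log calls are placeholders.

// sketch of the notification handler referenced by the /notify route: switch on
// the stage value, then acknowledge with the token and uid of the notification
const async_testClientNotify = async function (objParam) {
    switch (objParam.stage) {
        case -1:      // AI_PROGRESS_ERROR
            console.log("request " + objParam.uid + " stopped in error");
            break;
        case 4:       // AI_PROGRESS_DONE_IMAGE
            console.log("request " + objParam.uid + " produced file " + objParam.filename);
            break;
        case 5:       // AI_PROGRESS_STOP_AI
            console.log("request " + objParam.uid + " fully processed");
            break;
        default:      // other intermediate stages
            console.log("request " + objParam.uid + " is now at stage " + objParam.stage);
            break;
    }

    // acknowledge the notification (see the expected JSON further below)
    return {
        data: {
            token: objParam.token,
            uid: objParam.uid
        }
    };
}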

AI generated files (in the case of Stable Diffusion, images) are received on the /upload route. Note that in this case, via our service "imgServices.uploadImg", we use multer to process the image immediately. Here is the code for this service.


const multer = require('multer');
const uploadImg = multer({
    storage: multer.diskStorage({
        destination: function (req, file, cb) {
            cb(null, _getDirectory());
        },
        filename: function (req, file, cb) {
            let ext = file.originalname.split(".").pop().toLowerCase();
            let _filename = _getDirectory() + file.originalname;
            req.ai={
                filename:_filename,
                ext: ext
            };
            cb(null, file.originalname);
        }
    }), 
    limits: {fileSize: 1024000},     // 1Mb
    fileFilter(req, file, cb) {
        if (!file.originalname.match(/\.(png|jpg|jpeg)$/)){
            return cb(new Error('Unrecognised format - Please upload an image.'));
        }
        cb(undefined, true);
    }
}).single('image');
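
For completeness, here is a minimal sketch of what the async_onUploadImage handler referenced in the /upload route could look like. At this point multer has already written the file to disk and set req.ai.filename (using the sample's _getDirectory() helper, which returns the local upload directory), and that value is passed in as the filename parameter; what you then do with the file is up to your application.

// sketch of the upload handler referenced by the /upload route: the file has
// already been saved to disk by multer, so we only log it and acknowledge
const async_onUploadImage = async function (objParam) {
    console.log("received file " + objParam.filename + " for request " + objParam.uid);

    // ...process / move / store the file here as needed by your application...

    // acknowledge the upload with the same JSON as the /notify route
    return {
        data: {
            token: objParam.token,
            uid: objParam.uid
        }
    };
}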

Both endpoints /notify and /upload must reply to the OSAIS caller with a specific JSON content, so that OSAIS knows the request was received and processed correctly.

It is important to return a correct response: for example, with a badly configured ngrok tunnel (calling an unknown endpoint), the notification call would still return without error, but it would not be a valid acknowledgement that the request was processed correctly by the receiving application.

The expected returned data is a properly formed JSON object, as below:

{
    data: {
        token: <your token id>,
        uid: <the uid of the processed item>
    }
}

Both the <token> and the <uid> are passed as parameters to the notification endpoint, so it is just a matter for the receiving application to send them back as acknowledgement of having processed the notification.
