Whenever I attempt to send an image captured from a canvas to the Microsoft Cognitive Services API in TypeScript/Angular, I keep getting an error message:
"Decoding error, image format unsupported."
However, if I manually copy the base64 of the image into a converter, then paste it into Postman and make the request, it works perfectly fine. I have converted the base64 to a blob, so theoretically it should be sending the same image. I'm not sure where the issue lies... Could it be related to HttpClient? Here's the code snippet:
import { AfterViewInit, Component, OnInit, ViewChild } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
@Component({
selector: 'app-generator',
templateUrl: './generator.component.html',
styleUrls: ['./generator.component.css']
})
export class GeneratorComponent implements OnInit {
@ViewChild('video')
public video;
@ViewChild('canvas')
public canvas;
public urlPicture: string;
public constructor(private httpClient: HttpClient) {
}
public ngOnInit() { }
// tslint:disable-next-line:use-life-cycle-interface
public ngAfterViewInit() {
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
this.video.nativeElement.srcObject = stream;
this.video.nativeElement.play();
});
}
}
public capture() {
const ctx = this.canvas.nativeElement.getContext('2d');
ctx.drawImage(this.video.nativeElement, 0, 0, this.video.nativeElement.width, this.video.nativeElement.height);
const image = this.makeblob(this.canvas.nativeElement.toDataURL('image/png'));
console.log(this.canvas.nativeElement.toDataURL('image/png'));
this.detectFace(image.blob);
this.detectSkinColor(image.blob);
}
private makeblob(dataURL) {
const BASE64_MARKER = ';base64,';
if (dataURL.indexOf(BASE64_MARKER) === -1) {
// tslint:disable-next-line:no-shadowed-variable
const parts = dataURL.split(',');
// tslint:disable-next-line:no-shadowed-variable
const contentType = parts[0].split(':')[1];
// tslint:disable-next-line:no-shadowed-variable
const raw = decodeURIComponent(parts[1]);
return {
rawlength: raw.length,
blob: new Blob([raw], { type: contentType })
};
}
const parts = dataURL.split(BASE64_MARKER);
const contentType = parts[0].split(':')[1];
const raw = window.atob(parts[1]);
const rawLength = raw.length;
const uInt8Array = new Uint8Array(rawLength);
for (let i = 0; i < rawLength; ++i) {
uInt8Array[i] = raw.charCodeAt(i);
}
return {
rawlength: raw.length,
blob: new Blob([raw], { type: contentType })
};
}
private detectFace(blob) {
const headers = new HttpHeaders({
'Content-Type': 'application/octet-stream',
'Ocp-Apim-Subscription-Key' : 'mykey'
});
// tslint:disable-next-line:max-line-length
this.httpClient.post<any>('https://northeurope.api.cognitive.microsoft.com/face/v1.0/detect?returnFaceId=true&returnFaceLandmarks=false&returnFaceAttributes=age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise', blob, {headers: headers})
.subscribe((result) => {
console.log(result);
});
}
private detectSkinColor(blob) {
const headers = new HttpHeaders({
'Content-Type': 'application/octet-stream',
'Prediction-Key' : 'mykey'
});
// tslint:disable-next-line:max-line-length
this.httpClient.post<any>('https://northeurope.api.cognitive.microsoft.com/customvision/v3.0/Prediction/myprojectid/classify/iterations/Iteration1/image', blob, {headers: headers})
.subscribe((result) => {
console.log(result);
});
}
}