Fork me on GitHub Jeff Yang

HTML5 player for raw h.264 streams

要在網頁上播放影像串流有許多種方式.
常見的像是HLS, MPEG-DASH, RTMP with flash, vlc web plugin, motion JPEG…等等
以上都有其缺點
RTMP with flash => flash已陣亡
vlc web plugin => 不實用,用來測試還可以
motion JPEG => 簡單方便,但只是假的串流並不流暢
HLS,MPEG-DASH => 目前最理想的方式,兩者傳輸影像前需要先封裝(multiplex),因此對直播影像會有數秒不等的延遲.

HTML5 video tag 結合 MSE 可以直接播放 fragmented mp4格式影像
(fragmented mp4目前只可封裝H.264)

未來HLS也將支援fMP4傳送 https://en.wikipedia.org/wiki/HTTP_Live_Streaming
“On WWDC2016 Apple announced the inclusion of byte-range addressing for fragmented MP4 files, or fMP4,
allowing content to be played in HLS without the need to multiplex it into MPEG-2 Transport Stream.
The industry considered this as a step towards compatibility between HLS and MPEG-DASH”

目前為解決直播影像數秒的延遲現象,於是參考hls.js player 架構.
不傳送m3u8,直接傳送H.264然後在client端multiplex成fMP4,然後經由HTML5播放
這樣可以使延遲現象降低,不過也是有缺點像是無法利用CDN….端看需求與應用來取決

Source code:
wfs.js

That demonstrates transmuxing NALu h.264 streams into fmp4 from websocket.
That works directly on top of a standard HTML5 video element and MSE. Setting up the HTML5 video tag with MSE involves creating a MediaSource, calling mediaSource.addSourceBuffer, and feeding data through sourceBuffer.appendBuffer.
The video plays as long as the buffer is correctly fed with fMP4.

Template Matching

Test template matching algorithm

Original image and Template

Original Image



Template

Test result, Original image

_



Test result, 35 degree rotation of original image

_



Test result, 135 degree rotation , width & height downscale

 _

Onvif

I spent a few days to study and implement Onvif of server side. Some important points were recorded as below:

準備:

 開發編譯皆在virtualbox ubuntu 64bit
   download gsoap
   安裝 openssl and libssl-dev: sudo apt-get install openssl libssl-dev
   安裝 bison and flex: sudo apt-get install flex bison
   ./configure
   make & make install
 兩種主要測試工具,安裝在windows:
  ONVIF Device TestTool v13.12 (not free,自行google)
  ONVIF Device Manager (free)

說明:

Onvif 主要是透過SOAP和XML進行server和client間溝通
這裡server和client和一般想像中的有點不同
server指的是設備端像是IPCAM之類,client指的是管理端一般是NVR軟體
gSOAP目的,透過gSOAP可以編譯出server或client的C或C++介面
利用gSOAP編譯出來的介面,其中的struct會對應規格書定義,
只要按照規格書定義對struct填入該填的值,client&server就可以快樂溝通了?!

實作:

http://www.onvif.org/Portals/0/documents/WhitePapers/ONVIF_WG-APG-Application_Programmer%27s_Guide.pdf
規格書定義參數 http://www.onvif.org/onvif/ver20/util/operationindex.html
gSOAP編譯步驟:

1
2
wsdl2h -cegxy -o onvif.h -t /home/jeff/gsoap-2.8/gsoap/typemap.dat http://docs.oasis-open.org/ws-dd/discovery/1.1/os/wsdd-discovery-1.1-wsdl-os.wsdl http://www.w3.org/2006/03/addressing/ws-addr.xsd http://www.onvif.org/onvif/ver10/device/wsdl/devicemgmt.wsdl   
soapcpp2 -S -x -I /home/jeff/gsoap-2.8/gsoap/import -I /home/jeff/gsoap-2.8/gsoap/ onvif.h

以上,-c產生C語言格式,typemap.dat裡面定義XML namespace prefixes and type bindings,可以自行定義
然後,依據各個wsdl檔案定義去產生onvif.h
onvif.透過soapcpp2 產生出一些.c檔,-S只產生server端的定義檔

其中,
soapStub.h

1
2
3
4
5
6
7
8
9
10
11
12
/******************************************************************************\
* *
* Server-Side Operations *
* *
\******************************************************************************/

/** Web service operation '__tds__GetServices' (returns SOAP_OK or error code) */
SOAP_FMAC5 int SOAP_FMAC6 __tds__GetServices(struct soap*, struct _tds__GetServices *tds__GetServices, struct _tds__GetServicesResponse *tds__GetServicesResponse);
/** Web service operation '__tds__GetServiceCapabilities' (returns SOAP_OK or error code) */
SOAP_FMAC5 int SOAP_FMAC6 __tds__GetServiceCapabilities(struct soap*, struct _tds__GetServiceCapabilities *tds__GetServiceCapabilities, struct _tds__GetServiceCapabilitiesResponse *tds__GetServiceCapabilitiesResponse);
/** Web service operation '__tds__GetDeviceInformation' (returns SOAP_OK or error code) */
SOAP_FMAC5 int SOAP_FMAC6 __tds__GetDeviceInformation(struct soap*, struct _tds__GetDeviceInformation *tds__GetDeviceInformation, struct _tds__GetDeviceInformationResponse *tds__GetDeviceInformationResponse);
........

這些就是需要實作的部分,依據wsdl數量所產生,也許超過百個函式以上


ONVIF Device TestTool

ONVIF Device TestTool 是模擬client端的測試動作和一些Debug用的XML輸出


Device Service Address: http://192.168.56.101:80/onvif/device_service check
其對應的是devicemgmt.wsdl中的GetDeviceInformation
所以server端所要做的就是把對接口準備好(TCP,port可以自己定,但規格書要說用80比較好,避免防火牆)
此部分socket可自行實作或交由soap_bind完成

main.c

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
#include <stdio.h>
#include <time.h>
#include <string.h>
#include <pthread.h>
#include <sys/msg.h>
#include "mySoapStub.h"

/* Mutex for thread-shared state; initialized here, destroyed in _server.
 * NOTE(review): nothing in this listing ever locks it. */
static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
/* thread_ret: pthread_create result; thread_no: index of the next thread slot. */
int thread_ret=0, thread_no=0;
/* Book-keeping for one worker thread. */
typedef struct {
pthread_t thread_tid; /* id returned by pthread_create */
long thread_count;    /* not used anywhere in this listing */
}tThread;
tThread tptr[2]; /* room for two workers; only slot 0 is used below */
void tcpThread(void* data);

int _server(int argc, char **argv);

/* Entry point: hand control to _server, which spawns the SOAP TCP thread
 * and then parks forever. */
int main(int argc, char **argv){
_server(argc, argv);
return 1;
}

/* Worker thread: bind a gSOAP TCP listener on port 80 and serve ONVIF SOAP
 * requests forever.  'data' points at the thread index (not used here). */
void tcpThread(void* data){
int m, s; /* m: master/listen socket, s: accepted slave socket */
struct soap tcpSoap;
pthread_detach(pthread_self()); /* nobody joins this thread */
soap_init(&tcpSoap);
// soap_set_namespaces(&add_soap, namespaces);
/* NULL host = bind all interfaces; port 80 (ONVIF-recommended to dodge
 * firewalls, see notes above); backlog 100. */
m = soap_bind(&tcpSoap, NULL, 80, 100);
if (m < 0) {
soap_print_fault(&tcpSoap, stderr);
exit(-1); /* NOTE(review): exit() from a thread terminates the whole process */
}
while(1){
s = soap_accept(&tcpSoap);
if (s < 0) {
soap_print_fault(&tcpSoap, stderr);
exit(-1);
}
fprintf(stderr, "Socket connection successful: slave socket = %d\n", s);
soap_serve(&tcpSoap); /* dispatch to the __tds__* handlers in mySoapStub.c */
soap_end(&tcpSoap);   /* free data deserialized for this request */
usleep(500000);
}
pthread_exit ("thread all done"); /* unreachable: the loop above never exits */
}

int _server(int argc, char **argv){
thread_ret=pthread_create( &tptr[thread_no].thread_tid, NULL, (void *) tcpThread, (void*)&thread_no );
if(thread_ret!=0){
fprintf (stderr, "Create pthread error~~~\n");
exit (1);
}
thread_no++;
while(1){}
usleep(500000);

}
pthread_mutex_destroy(&mutex);
return 1;
}

還有把對應函式準備好,也就是實作soapStub.h裡面的

1
SOAP_FMAC5 int SOAP_FMAC6 __tds__GetDeviceInformation(...)

mySoapStub.c

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
/** Web service operation '__tds__GetDeviceInformation' (returns SOAP_OK or error code)
 *
 * Answers the TestTool's GetDeviceInformation check with fixed demo
 * identity strings.  Response buffers come from soap_malloc(), so gSOAP
 * releases them together with the request context (soap_end) -- no manual
 * free needed.  NOTE(review): the soap_malloc() results are not checked
 * for NULL before strcpy(); acceptable only for a demo. */
SOAP_FMAC5 int SOAP_FMAC6 __tds__GetDeviceInformation(struct soap *pSoap, struct _tds__GetDeviceInformation *tds__GetDeviceInformation, struct _tds__GetDeviceInformationResponse *tds__GetDeviceInformationResponse){
printf("\n");
/* 100 bytes each -- comfortably larger than any literal copied below. */
tds__GetDeviceInformationResponse->Manufacturer=(char*)soap_malloc(pSoap,100);
tds__GetDeviceInformationResponse->Model=(char*)soap_malloc(pSoap,100);
tds__GetDeviceInformationResponse->FirmwareVersion=(char*)soap_malloc(pSoap,100);
tds__GetDeviceInformationResponse->SerialNumber=(char*)soap_malloc(pSoap,100);
tds__GetDeviceInformationResponse->HardwareId=(char*)soap_malloc(pSoap,100);

strcpy(tds__GetDeviceInformationResponse->Manufacturer,"Jeff Yang");
strcpy(tds__GetDeviceInformationResponse->Model,"Jeff Yang XXXX");
strcpy(tds__GetDeviceInformationResponse->FirmwareVersion,"v1.0.0.01");
strcpy(tds__GetDeviceInformationResponse->SerialNumber,"88998998888888");
strcpy(tds__GetDeviceInformationResponse->HardwareId,"123456789999");
return SOAP_OK;
}
/** Web service operation '__tds__SetSystemDateAndTime' (returns SOAP_OK or error code) */
/* Minimal stub: acknowledge the request and ignore its content.  Every
 * generated operation needs at least a stub like this or linking fails. */
SOAP_FMAC5 int SOAP_FMAC6 __tds__SetSystemDateAndTime(struct soap *pSoap, struct _tds__SetSystemDateAndTime *tds__SetSystemDateAndTime, struct _tds__SetSystemDateAndTimeResponse *tds__SetSystemDateAndTimeResponse){ return SOAP_OK; }
....

其他未使用到的函式,也需要填個回傳值否則將編譯錯誤
編譯,

1
2
gcc  -c -o mySoapStub.o mySoapStub.c
gcc soapC.o soapServer.o stdsoap2.o main.o duration.o mySoapStub.o -lpthread -o ws-server

把server端程式run起來,當按check時TestTool就會收到回傳值


ONVIF Device TestTool


以上,都是在已知server的IP和port情況下進行, e.g. http://192.168.56.101:80/onvif/device_service


一個client可以同時監控多個server
所以要一次得知單一或多個server的IP資訊需要透過multicast的方式(Discover Devices)
經由傳送資料到 239.255.255.250 port 3702(ws-discovery)
若網路上存在server,會回應此訊息
此部分可以參考
http://albert-oma.blogspot.tw/2013/09/onvif-ws-discovery-implementation.html
http://albert-oma.blogspot.tw/2013/09/onvif-ws-discovery-spec.html (A Introduction in Chinese)

小結:

符合ONVIF標準的client端(NVR)與server端(Devices),其包含兩主要流程
1.自動偵測Devices (multicast)
2.設定和取得Device資訊 (tcp)
將兩流程皆整合在server以及client端,相互對應,就可以使兩者以ONVIF標準方式進行溝通

Using shared Go library in C and C's callback in Go

1. Go functions can be easily executed from C applications.

libgoserver.go

1
2
3
4
5
6
7
// Package main builds as a C shared library via
// `go build -buildmode=c-shared`; func main is required by the toolchain
// but never runs as a program entry point.
package main
import "C"
// GoFccn is exported to C; callable through the generated libgoserver.h.
//export GoFccn
func GoFccn() {
}
func main() {
}

go build -buildmode=c-shared -o libgoserver.so libgoserver.go

main.c

1
2
3
4
5
6
#include "libgoserver.h"

/* Demo driver: invoke the Go-exported GoFccn() once, then exit. */
int main(void) {
	GoFccn();
	return 0;
}

gcc -o main main.c -lgoserver

2. Using C’s callback (function pointer) in Go.

libgoserver.go

1
2
3
4
5
6
7
8
9
10
11
12
package main
/*
#cgo LDFLAGS: -Wl,--unresolved-symbols=ignore-in-object-files -Wl,-allow-shlib-undefined
#include "xx.h"
*/

import "C"
//export GoFccn
func GoFccn() {
}
func main() {
C.pass_GoAdd((C.int)(10101))
}

xx.h

1
2
3
4
5
6
7
8
9
10
/* xx.h -- C-side declaration visible to the Go shared library.  The symbol
 * is intentionally left unresolved at build time (see the
 * -allow-shlib-undefined linker flags in libgoserver.go) and is expected
 * to be provided by the host C program at link/load time. */
#ifndef XX_H
#define XX_H
#ifdef __cplusplus
extern "C" {
#endif
/* Receives an int from the Go side; implementation not shown in this post. */
extern void pass_GoAdd(int);
#ifdef __cplusplus
}
#endif
#endif /* XX_H */

go build -buildmode=c-shared -o libgoserver.so libgoserver.go

wfsserver.c

1
2
3
4
5
6
7
8
#include "libgoserver.h"
#include "wfsserver.h"
#include <stdio.h>

/* Most recently registered callback, kept at file scope so later code in
 * this translation unit could invoke it again. */
Callback gFun;

/*
 * Register 'fun' and invoke it once.
 *
 * Returns 0 on success, -1 if 'fun' is NULL.
 *
 * Fixes vs. the original listing: it called gFun(c) with an undeclared
 * variable 'c' (a compile error) and fell off the end of a non-void
 * function.  A neutral value of 0 is passed instead -- adjust if the
 * callback is meant to receive real data (TODO confirm intended value).
 */
int wfs_server_run( Callback fun ){
    if (fun == NULL) {
        return -1;
    }
    gFun = fun;
    gFun(0);
    return 0;
}

wfsserver.h

1
2
3
4
5
6
7
8
9
10
11
/* wfsserver.h -- public interface of the thin C shim between the Go shared
 * library (libgoserver) and the host application.
 * NOTE(review): identifiers beginning with a double underscore, like this
 * include guard, are reserved for the implementation. */
#ifndef __WFSSERVER_H__
#define __WFSSERVER_H__
#ifdef __cplusplus
extern "C" {
#endif
/* Signature of the callback the host passes to wfs_server_run. */
typedef void (*Callback)(int);
/* Register 'fun' so the server side can invoke it; see wfsserver.c. */
int wfs_server_run( Callback fun );
#ifdef __cplusplus
}
#endif
#endif

gcc -Wall -fpermissive -w -O3 -fPIC -shared -o libwfsserver.so wfsserver.c -lgoserver

main.c

1
2
3
4
5
6
7
8
9
10
11
12
13
#include "wfsserver.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Callback handed to the wfs server shim; echoes the integer it received. */
void cb_fun(int c){
	printf("This is a C cb_fun %d.\n",c);
}

/* Register the callback with the shim and exit. */
int main(void) {
	wfs_server_run( cb_fun );
	return 0;
}

gcc -o main main.c -lwfsserver

Di-Tech Competition

滴滴出行 大數據算法競賽

供需預測的目標是準確預測出給定地理區域在未來某個時間段的出行需求量及需求滿足量。
調研發現,同一地區不同時間段的訂單密度是不一樣的,
例如大型居住區在早高峰時段的出行需求比較旺盛,
而商務區則在晚高峰時段的出行需求比較旺盛。
如果能預測到在未來的一段時間內某些地區的出行需求量比較大,
就可以提前對營運車輛提供一些引導,指向性地提高部分地區的運力,
從而提升乘客的整體出行體驗。

初賽 2016/5/18~2016/6/18

成績排名前50名(含並列)的隊伍進入決賽

2016/6/15 上傳成績

排名 隊伍名稱 最優成績 最優成績提交日 最新成績 最新成績提交日
1 inferrrr 0.248673 2016-06-15 0.248673 2016-06-15

50 35 Bank St 0.265588 2016-06-12 0.284922 2016-06-15

198 Mr.J 0.289462 2016-06-15 0.289462 2016-06-15

Building Information Modeling - BIM

Recently I have been doing some research on BIM.

BIM file needs to be filtered out some parts then showing that on web…

Some tools use to display separately parts of IFC model

[1] BIMserver with BIMvie.ws.

 show whole building  filter out Level 1&2

[2] IfcStoreyView IFC model display 2D, 3D use OpenGL…

Test

architect.ifc -> IfcConvert to dae -> dae import into three.js

1
G:\BIM2>IfcConvert.exe architect.ifc a.dae
Scanning file...
Done scanning file
Creating geometry...
Done creating geometry
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
<!doctype html>
<html lang="en">
<head>
<title>Test</title>
<meta charset="utf-8">
</head>
<body style="margin: 0;">
<!-- three.js core plus the Collada (.dae) loader and orbit camera controls -->
<script src="three.min.js"></script>
<script src="ColladaLoader.js"></script>
<script src="OrbitControls.js"></script>
<script>
// Minimal three.js viewer: loads the IfcConvert-produced a.dae model,
// lights it with three point lights, and lets the user orbit the camera.
var scene, camera, renderer;
init();
animate();
function init() {
scene = new THREE.Scene();
var WIDTH = window.innerWidth,
HEIGHT = window.innerHeight;
renderer = new THREE.WebGLRenderer({antialias:true});
renderer.setSize(WIDTH, HEIGHT);
document.body.appendChild(renderer.domElement);
// 45-degree FOV; far plane at 10000 so a whole building fits in view.
camera = new THREE.PerspectiveCamera(45, WIDTH / HEIGHT, 0.1, 10000);
camera.position.set(50,150,100);
scene.add(camera);
// Keep the canvas and projection in sync with the window size.
window.addEventListener('resize', function() {
var WIDTH = window.innerWidth,
HEIGHT = window.innerHeight;
renderer.setSize(WIDTH, HEIGHT);
camera.aspect = WIDTH / HEIGHT;
camera.updateProjectionMatrix();
});
// Three point lights at different positions/intensities, each with a
// small helper sphere so the light positions are visible while debugging.
var light = new THREE.PointLight(0xfffff3, 0.8);
light.position.set(-100,200,100);
scene.add(light);
var sphereSize = 1;
var pointLightHelper = new THREE.PointLightHelper( light, sphereSize );
scene.add( pointLightHelper );
var light2 = new THREE.PointLight(0xd7f0ff, 0.2);
light2.position.set(200,200,100);
scene.add(light2);
var sphereSize = 1;
var pointLightHelper2 = new THREE.PointLightHelper( light2, sphereSize );
scene.add( pointLightHelper2 );
var light3 = new THREE.PointLight(0xFFFFFF, 0.5);
light3.position.set(150,200,-100);
scene.add(light3);
var sphereSize = 1;
var pointLightHelper3 = new THREE.PointLightHelper( light3, sphereSize );
scene.add( pointLightHelper3 );
// Load the converted model, place it at the origin, and add axis + grid
// helpers for orientation.
var loader = new THREE.ColladaLoader();
loader.options.convertUpAxis = true;
loader.load( 'a.dae', function ( collada ) {
var dae = collada.scene;
var skin = collada.skins[ 0 ];
dae.position.set(0,0,0);//x,z,y- if you think in blender dimensions ;)
dae.scale.set(1.5,1.5,1.5);
scene.add(dae);
var axes = new THREE.AxisHelper(50);
axes.position = dae.position;
scene.add(axes);
var gridXZ = new THREE.GridHelper(100, 10);
gridXZ.setColors( new THREE.Color(0x8f8f8f), new THREE.Color(0x8f8f8f) );
gridXZ.position.set(0,0,0 );
scene.add(gridXZ);
});
controls = new THREE.OrbitControls(camera, renderer.domElement);
}
// Render loop: one frame per browser repaint.
function animate() {
requestAnimationFrame(animate);
renderer.render(scene, camera);
controls.update();
}
</script>

</body>
</html>

Test

TODO:

Submit one BIM file (IFC format) -> filter out some parts -> remake IFC -> convert format(dae or json or…) -> display on web(three.js)

React.js work with Tornado

React work with tornado.

I would like to understand how React.js work, so just try to do it…

Based on Ubuntu 14.04 64bit
Prerequisite : node.js, webpack, tornado, fixed-data-table.

Flow

Run Tornado -> regularly get XML data from another website -> display with React

Install

  1. mkdir react-table && cd react-table
  2. npm init
  3. npm install webpack --save-dev
  4. npm install babel-loader babel-core babel-preset-es2015 babel-preset-react --save-dev
  5. ln -s /usr/bin/nodejs /usr/bin/node
  6. npm install --save react
  7. npm install --save react-dom
  8. npm install webpack-dev-server --save-dev
  9. npm install fixed-data-table --save
  10. npm install css-loader style-loader --save-dev

React part

touch webpack.config.js

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
/* webpack.config.js */
// Bundles src/app.js (ES2015 + JSX via Babel) into dist/app.js, inlining
// any imported CSS through style-loader/css-loader.
var path = require('path');
var webpack = require('webpack');

module.exports = {
context: __dirname + "/src",
entry: "./app.js",
output: {
filename: "app.js",
path: __dirname + "/dist",
},
module: {
loaders: [
{
// Transpile .js/.jsx files under ./src (never node_modules) with Babel.
test: /\.jsx?$/,
include: [
__dirname + "/src"
],
exclude: /node_modules/,
loader: 'babel',
query: {
presets: ['react', 'es2015']
}
}, { test: /\.css$/, loader: 'style-loader!css-loader' },
]
},
};

touch myTable.js

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
/* myTable.js */
"use strict";
var FakeObjectDataListStore = require('./dataListStore');
var FixedDataTable = require('fixed-data-table');
var React = require('react');

const {Table, Column, Cell} = FixedDataTable;
const TextCell = ({rowIndex, data, col, ...props}) => (
<Cell {...props}>
{data.getObjectAt(rowIndex)[col]}
</Cell>
);

class MyLinkCell extends React.Component {
render() {
const {rowIndex, data, col, ...props} = this.props;
const link = data.getObjectAt(rowIndex)[col];
return (
<Cell {...props}>
<a href={link}>{link}</a>
</Cell>
);

}
}
class DataListWrapper {
constructor(indexMap, data) {
this._indexMap = indexMap;
this._data = data;
}
getSize() {
return this._indexMap.length;
}
getObjectAt(index) {
return this._data.getObjectAt(
this._indexMap[index]
);
}
}
class MyTable extends React.Component {
constructor(props) {
super(props);
//=========================
var pageData = JSON.parse(document.getElementById('page-data').getAttribute('data-page'));
//========================
this._dataList = new FakeObjectDataListStore(pageData);
this.state = {
filteredDataList: this._dataList,
};
this._onFilterChange = this._onFilterChange.bind(this);
}
_onFilterChange(e) {
if (!e.target.value) {
this.setState({
filteredDataList: this._dataList,
});
}
var filterBy = e.target.value;
var size = this._dataList.getSize();
var filteredIndexes = [];
for (var index = 0; index < size; index++) {
var {TITLE} = this._dataList.getObjectAt(index);
if (TITLE.indexOf(filterBy) !== -1){
filteredIndexes.push(index);
}
}
this.setState({
filteredDataList: new DataListWrapper(filteredIndexes, this._dataList),
});
}
render() {
var {filteredDataList} = this.state;
return (
<div>
<input
onChange={this._onFilterChange}
placeholder="Filter by 職稱"
/>

<br />
<Table
rowHeight={50}
rowsCount={filteredDataList.getSize()}
headerHeight={50}
width={1000}
height={500}
{...this.props}>

<Column
header={<Cell>職稱</Cell>}

cell={<TextCell data={filteredDataList} col="TITLE" />}
fixed={true}
width={100}
/>
<Column
header={<Cell>用人單位</Cell>}

cell={<TextCell data={filteredDataList} col="ORG_NAME" />}
fixed={true}
width={100}
/>
<Column
header={<Cell>工作區域</Cell>}

cell={<TextCell data={filteredDataList} col="WORK_PLACE_TYPE" />}
width={100}
/>
<Column
header={<Cell>人員區分</Cell>}

cell={<TextCell data={filteredDataList} col="GENDER_TYPE" />}
width={100}
/>
<Column
header={<Cell>開始日</Cell>}

cell={<TextCell data={filteredDataList} col="DATE_FROM" />}
width={100}
/>
<Column
header={<Cell>結束日</Cell>}

cell={<TextCell data={filteredDataList} col="DATE_TO" />}
width={100}
/>
<Column
header={<Cell>網址</Cell>}

cell={<MyLinkCell data={filteredDataList} col="VIEW_URL" />}
width={400}
/>
</Table>
</div>
);

}
}
module.exports = MyTable;

touch dataListStore.js

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
/* dataListStore.js
 *
 * Thin synchronous store over a pre-loaded array of rows, exposing the
 * getObjectAt/getSize/getAll interface that fixed-data-table expects.
 */
class dataListStore {
  constructor( rows ){
    this.size = rows.length;
    this._cache = rows;
  }

  // Return the row at `index`, or undefined when out of range.
  // Fix vs. original: the upper-bound check was `index > this.size`, an
  // off-by-one that let `index === this.size` fall through to the cache
  // lookup instead of being rejected.
  getObjectAt(/*number*/ index) /*?object*/ {
    if (index < 0 || index >= this.size){
      console.log('_QQca -> ',this._cache[index],index);
      return undefined;
    }
    if (this._cache[index] === undefined) {
      console.log('_cache undefined!!!!! ');
    }
    return this._cache[index];
  }

  // Return a shallow copy of all rows (cache is always fully populated
  // because the constructor receives the complete array).
  getAll() {
    if (this._cache.length < this.size) {
      for (var i = 0; i < this.size; i++) {
        this.getObjectAt(i);
      }
    }
    return this._cache.slice();
  }

  getSize() {
    return this.size;
  }
}
module.exports = dataListStore;

touch app.js

1
2
3
4
5
6
/* app.js */
// Entry point: mount the job-listing table into <div id="example">
// (see templates/index.html).
import MyTable from './myTable'
import React from 'react'
import ReactDOM from 'react-dom'

ReactDOM.render(<MyTable />,document.getElementById('example'));

run webpack
root@ubuntu:/home/jeff/react-table# webpack

copy app.js to Tornado part
root@ubuntu:/home/jeff/react-table# mkdir -p ../react-tornado/static/js && mkdir ../react-tornado/static/css && mkdir -p ../react-tornado/templates
root@ubuntu:/home/jeff/react-table# cp fixed-data-table.css ../react-tornado/static/css/
root@ubuntu:/home/jeff/react-table# cp ./dist/app.js ../react-tornado/static/js/

Tornado part

/react-tornado

touch /react-tornado/templates/index.html

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
<!doctype html>
<html>
<head>
<meta charset="UTF-8">
<title>Govjobs</title>
<!-- fixed-data-table stylesheet, copied from the npm package -->
<link href="/static/css/fixed-data-table.css"
rel="stylesheet">

</head>
<body style="font-family:arial;font-size:12px; background:#e1e1e1">
<p></p>
<!-- React mounts MyTable here (see app.js) -->
<div id="example"></div>
<!-- myTable.js reads its rows from this attribute.
     NOTE(review): data-page is empty in this listing; presumably the
     Tornado template interpolates the JSON here -- confirm against main.py -->
<div id="page-data" data-page=""></div>
<script src="/static/js/app.js"></script>
</body>
</html>

touch /react-tornado/main.py

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# main.py
# -*- coding: utf-8 -*-
# Tornado app serving the React job-listing table.  A background scheduler
# refreshes export.xml from the DGPA site every weekday at 18:00; each page
# load parses that file and hands the rows to the template as JSON.
#
# Fix vs. the original listing: all indentation was lost when the post was
# published; it is restored here so the script actually runs.
import os,json, time, datetime
import tornado.httpserver
import tornado.options
from tornado import gen
from tornado import web, ioloop, websocket
###
import xml.etree.cElementTree as ET
from apscheduler.schedulers.background import BackgroundScheduler
import urllib3
###

settings = {
    "static_path": os.path.join(os.path.dirname(__file__), "static"),
    "template_path": os.path.join(os.path.dirname(__file__), "templates"),
    "autoreload": True
}


class graphHandler(tornado.web.RequestHandler):
    @gen.coroutine
    def i(self):
        # Coroutine wrapper so get() can `yield` the parsed data.
        return self.loadData()

    @gen.coroutine
    def get(self):
        response, announceDate = yield self.i()
        self.render("index.html", a=response, b=announceDate)

    def loadData(self):
        """Parse export.xml; return (rows as a JSON string, announce date)."""
        #saveData()
        module_dir = os.path.dirname(__file__)
        file_path = os.path.join(module_dir, 'export.xml')
        A = []
        tree = ET.parse(file_path)
        root = tree.getroot()
        ANNOUNCE_DATE = root.find('ANNOUNCE_DATE').text
        for country in root.findall('ROW'):
            A.append({
                'TITLE': country.find('TITLE').text,
                'ORG_NAME': country.find('ORG_NAME').text,
                'WORK_PLACE_TYPE': country.find('WORK_PLACE_TYPE').text,
                'GENDER_TYPE': country.find('GENDER_TYPE').text,
                'DATE_FROM': country.find('DATE_FROM').text,
                'DATE_TO': country.find('DATE_TO').text,
                'VIEW_URL': country.find('VIEW_URL').text
            })
        return json.dumps(A, ensure_ascii=False), ANNOUNCE_DATE


def saveData():
    """Download the latest job XML; overwrite export.xml only on HTTP 200."""
    http = urllib3.PoolManager()
    r = http.request('GET', 'http://web3.dgpa.gov.tw//WANT03FRONT//AP//WANTF00003.aspx?GETJOB=Y')
    if r.status == 200:
        module_dir = os.path.dirname(__file__)
        file_path = os.path.join(module_dir, 'export.xml')
        with open(file_path, "wb") as code:
            code.write(r.data)


sched = BackgroundScheduler()


def scheduled_job():
    saveData()


def main():
    application = tornado.web.Application([
        (r"/", graphHandler)
    ], **settings)

    http_server = tornado.httpserver.HTTPServer(application)
    port = int(os.environ.get("PORT", 5000))
    http_server.listen(port)

    # Refresh the XML every weekday at 18:00 local time.
    sched.add_job(scheduled_job, 'cron', day_of_week='mon-fri', hour=18, minute=0)
    sched.start()

    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()

Run
python main.py

Demo

There is trouble getting the data regularly — Heroku imposes limits on HTTP requests, and saved files do not persist…

https://govjobs.herokuapp.com/

Work fine on local

 show result

Time Series Data Visualization 2

Prerequisite : tornado , c3.js , influxDB

Time Series Data Visualization

I would like to create a realtime time-series chart pulling data from influxDB, but I barely found a suitable websocket solution with Django.

So,I changed from django to tornado easily achieved websocket.

Based on Ubuntu 14.04 64bit
tornado
pip3 install tornado

views.py

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
# -*- coding: utf-8 -*-
# Tornado app for the time-series dashboard: serves the c3.js chart page,
# answers one-shot prediction queries under /q/, and pushes fresh chart
# columns to every connected websocket client every 10 seconds.
#
# Fix vs. the original listing: all indentation was lost when the post was
# published; it is restored here so the script actually runs.
import os
import json, time, datetime
import tornado.options
#import tornado.httpclient
from tornado import gen
from tornado import web, ioloop, websocket
##
import timeSeriesDB
import pe
##
tornado.options.define("port", default=8888, help="message...", type=int)


class getData:
    def __init__(self):
        pass

    def i(self):
        """Read the two (x, y) series from influxDB via timeSeriesDB."""
        tsDB = timeSeriesDB.tsDB()
        x1, d1, x2, d2 = tsDB.read_data()
        #print("Result: ", d1 )
        chart = [x1, x2, d1, d2]
        return chart


class graphHandler(tornado.web.RequestHandler):
    @gen.coroutine
    def i(self):
        gd = getData()
        chart = gd.i()
        return chart

    @gen.coroutine
    def get(self):
        # Initial page render; the constants a..i pre-fill the input form.
        response = yield self.i()
        self.render("index.html", chart_columns=json.dumps(response), a=23.53, b=1, c=12.8,
                    d=26.075, e=72.325, f=1,
                    g=1.7, h=0, i=30.8)


class calaHandler(tornado.web.RequestHandler):
    @gen.coroutine
    def i(self, X, y):
        return pe.Online_One_PE(X, y)

    @gen.coroutine
    def get(self):
        # Nine numeric form fields a..i form the feature vector X.  All
        # arguments are fetched first and converted afterwards, preserving
        # the original's error ordering (a missing argument raises before a
        # malformed float does).
        raw = [self.get_argument(k) for k in ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i')]
        X = [float(v) for v in raw]
        y = [1]
        Xy_predict = yield self.i(X, y)
        self.write(str(Xy_predict[0]))


class websocketManager(object):
    # All currently open websocket connections.
    users = []

    @classmethod
    def add_user(cls, websocket):
        cls.users.append(websocket)

    @classmethod
    def remove_user(cls, websocket):
        cls.users.remove(websocket)


class Socket(websocket.WebSocketHandler):
    def open(self):
        websocketManager.add_user(self)
        # First push after 1 s; start() then reschedules itself every 10 s.
        tornado.ioloop.IOLoop.instance().add_timeout(datetime.timedelta(seconds=1), self.start)

    def on_close(self):
        websocketManager.remove_user(self)

    def on_message(self, message):
        pass

    def start(self):
        # NOTE(review): each open connection schedules its own start() timer,
        # so with N clients the data is fetched and broadcast N times per
        # cycle; a single shared PeriodicCallback would avoid that.
        chart = getData().i()
        chart_cols = json.dumps(chart)
        for user in websocketManager.users:
            user.write_message(chart_cols)
        tornado.ioloop.IOLoop.instance().add_timeout(datetime.timedelta(seconds=10), self.start)


settings = {
    "static_path": os.path.join(os.path.dirname(__file__), "static"),
    "template_path": os.path.join(os.path.dirname(__file__), "templates"),
    "autoreload": True
}


def main():
    tornado.options.parse_command_line()
    application = tornado.web.Application([
        (r"/", graphHandler),
        (r"/q/", calaHandler),
        (r"/socket", Socket),
    ], **settings)
    application.listen(tornado.options.options.port)
    tornado.ioloop.IOLoop.current().start()


if __name__ == "__main__":
    main()

index.html

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
<!doctype html>
<html>
<head>
<meta charset="UTF-8">
<title>C3 DataTable</title>
<script src="/static/js/jquery-1.12.2.min.js"></script>
<!-- Load c3.css -->
<link href="/static/css/c3.min.css" rel="stylesheet" type="text/css">
<!-- Load d3.js and c3.js -->
<script src="/static/js/d3.min.js" charset="utf-8"></script>
<script src="/static/js/c3.min.js"></script>
</head>
<body>
<!-- c3 renders the time-series chart into this div -->
<div id="chart"></div>

<p>INPUT</p>
<form action="/q/" method="get">
Mean cooling tower leaving water temperture: <input type="text" id="a" name="a" value=> <br>
Zone pumps number: <input type="text" id="b" name="b" value=> <br>
Mean zone pumps leaving water temperture: <input type="text" id="c" name="c" value=> <br>
Mean cooling container temperture: <input type="text" id="d" name="d" value=> <br>
Mean cooling container humidity: <input type="text" id="e" name="e" value=> <br>
Chiller DPS: <input type="text" id="f" name="f" value=> <br>
Chiller DPT01: <input type="text" id="g" name="g" value=> <br>
Chiller compressor number: <input type="text" id="h" name="h" value=> <br>
Total it pw: <input type="text" id="i" name="i" value=> <br>
<p>RESULT: <span id='result'></span></p>
<button type="button" id='sum'>Submit</button>
</form>

<script>
// Submit the nine form fields to /q/ via AJAX and show the prediction.
$(document).ready(function(){
$("#sum").click(function(){
var a = $("#a").val();
var b = $("#b").val();
var c = $("#c").val();
var d = $("#d").val();
var e = $("#e").val();
var f = $("#f").val();
var g = $("#g").val();
var h = $("#h").val();
var i = $("#i").val();
$.get("/q/",{'a':a,'b':b,'c':c,'d':d,'e':e,'f':f,'g':g,'h':h,'i':i}, function(ret){
$('#result').html(ret)
})
});
});

// Initial chart data is rendered server-side by Tornado; {% raw %} stops
// the template engine escaping the JSON.
var columns = {% raw chart_columns %}

// Two series ('ori', 'pred'), each paired with its own x column of
// ISO-8601 timestamps.
var chart = c3.generate({
bindto: '#chart',
data: {
xs: {
'ori': 'x1',
'pred': 'x2',
},
xFormat: '%Y-%m-%dT%H:%M:%SZ',
columns: columns,
onclick: function (d, i) { /* ... */ }
},
axis: {
x: {
type: 'timeseries',
localtime: true,
tick: {
format: '%Y-%m-%d %H:%M:%S'
}
}
}
});

// Replace the chart's columns with a fresh payload from the websocket.
function chartUpdateFun(cols){
var colsArray = JSON.parse(cols);
chart.load({
columns: colsArray
});
}

// Websocket client: the server pushes new chart columns every ~10 s.
// NOTE(review): the three handler assignments below are chained with the
// comma operator -- valid, though unusual, JavaScript.
var chatObj = {
host: location.host,
socket: null,
init: function(){
var url = "ws://" + chatObj.host + "/socket";
chatObj.socket = new WebSocket(url);
chatObj.socket.onmessage = function(event){
chartUpdateFun(event.data);
},
chatObj.socket.onclose = function(event){
console.log("on close");
},
chatObj.socket.onerror = function(event){
console.log("on error");
}
}
};
chatObj.init();
</script>

</body>
</html>

Show

 show result

Time Series Data Visualization

Prerequisite : django , c3.js , influxDB

Based on Ubuntu 14.04 64bit
django

  1. Install virtualenv, pip….
  2. virtualenv --distribute myApp
  3. pip3 install django
  4. django-admin startproject myWebsite
  5. python3 manage.py runserver 127.0.0.1:8889

influxDB

  1. curl -sL https://repos.influxdata.com/influxdb.key | sudo apt-key add -
  2. source /etc/lsb-release
  3. echo "deb https://repos.influxdata.com/${DISTRIB_ID,,} ${DISTRIB_CODENAME} stable" | sudo tee /etc/apt/sources.list.d/influxdb.list
  4. sudo apt-get update && sudo apt-get install influxdb
  5. service influxdb start

c3.js

  1. download c3.min.js,d3.min.js,c3.min.css…

InfluxDB’s content

influxDB's content

views.py

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
# -*- coding: utf-8 -*-
# Django views for the time-series dashboard: index() renders the c3.js
# chart from influxDB data; search() answers the AJAX prediction query.
#
# Fix vs. the original listing: all indentation was lost when the post was
# published; it is restored here so the module actually imports.
from django.http import HttpResponse
from .models import Greeting
from django.shortcuts import *
from django.template import RequestContext
from django.http import HttpResponseRedirect
import json, time
##
from h import timeSeriesDB
from h import pe
##
#
def index(request):
    """Render index.html with the chart columns and the form defaults."""
    tsDB = timeSeriesDB.tsDB()
    x1, d1, x2, d2 = tsDB.read_data()
    print("Result: ", d1)
    chart = [x1, x2, d1, d2]
    return render(request, 'index.html',
                  {'chart_columns': json.dumps(chart), 'a': 23.53, 'b': 1, 'c': 12.8,
                   'd': 26.075, 'e': 72.325, 'f': 1,
                   'g': 1.7, 'h': 0, 'i': 30.8})


def search(request):
    """Run a prediction from the nine numeric GET parameters a..i."""
    # Fetch all parameters first, then convert, preserving the original's
    # error ordering (a missing key raises before a malformed float does).
    raw = [request.GET[k] for k in ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i')]
    X = [float(v) for v in raw]
    y = [1]
    Xy_predict = pe.Online_One_PE(X, y)

    return HttpResponse(str(Xy_predict[0]))

index.html

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
<!doctype html>
<html>
<head>
<meta charset="UTF-8">
<title>Time Series Data Visualization</title>
<script src="/static/jquery-1.12.2.min.js"></script>
<!-- Load c3.css -->
<link href="/static/c3.min.css" rel="stylesheet" type="text/css">
<!-- Load d3.js and c3.js -->
<script src="/static/d3.min.js" charset="utf-8"></script>
<script src="/static/c3.min.js"></script>
</head>
<body>
<!-- c3 renders the time-series chart into this div -->
<div id="chart"></div>
<p>INPUT</p>
<!-- NOTE(review): action="/add/" is never used -- the Submit button is
     type="button" and the jQuery handler below calls /search/ instead. -->
<form action="/add/" method="get">
Mean cooling tower leaving water temperture: <input type="text" id="a" name="a" value=> <br>
Zone pumps number: <input type="text" id="b" name="b" value=> <br>
Mean zone pumps leaving water temperture: <input type="text" id="c" name="c" value=> <br>
Mean cooling container temperture: <input type="text" id="d" name="d" value=> <br>
Mean cooling container humidity: <input type="text" id="e" name="e" value=> <br>
Chiller DPS: <input type="text" id="f" name="f" value=> <br>
Chiller DPT01: <input type="text" id="g" name="g" value=> <br>
Chiller compressor number: <input type="text" id="h" name="h" value=> <br>
Total it pw: <input type="text" id="i" name="i" value=> <br>
<p>RESULT: <span id='result'></span></p>
<button type="button" id='sum'>Submit</button>
</form>
<script>
// Submit the nine form fields to /search/ via AJAX and show the prediction.
$(document).ready(function(){
$("#sum").click(function(){
var a = $("#a").val();
var b = $("#b").val();
var c = $("#c").val();
var d = $("#d").val();
var e = $("#e").val();
var f = $("#f").val();
var g = $("#g").val();
var h = $("#h").val();
var i = $("#i").val();
$.get("/search/",{'a':a,'b':b,'c':c,'d':d,'e':e,'f':f,'g':g,'h':h,'i':i}, function(ret){
$('#result').html(ret)
})
});
});

// Fix vs. the original listing: this line read "var columns = ;" -- the
// Django template expression was stripped when the post was published,
// leaving a JavaScript syntax error.  |safe stops Django HTML-escaping
// the JSON payload produced in views.py.
var columns = {{ chart_columns|safe }};
var chart = c3.generate({
bindto: '#chart',
data: {
xs: {
'ori': 'x1',
'pred': 'x2',
},
xFormat: '%Y-%m-%dT%H:%M:%SZ',
columns: columns,
onclick: function (d, i) { /* ... */ }
},
axis: {
x: {
type: 'timeseries',
localtime: true,
tick: {
format: '%Y-%m-%d %H:%M:%S'
}
}
}
});
</script>

</body>
</html>

Show

show result

Regressor test with Skflow and Sklearn - 2

Test two different types of learning methods - offline learning and online learning

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
# -*- coding: utf-8 -*-
from sqlalchemy import *
from threading import Timer
import signal, sys, time, threading
import configparser
from datetime import datetime
###
import pandas
import pickle
from sklearn import datasets, cross_validation, metrics
from sklearn import preprocessing
###
from sklearn.linear_model import PassiveAggressiveRegressor
from sklearn.svm import SVR
###############################################################################
class PredictData:
    """Wraps a scikit-learn regressor supporting two modes:

    - online learning (``partial_fit_flag=True``): PassiveAggressiveRegressor,
      updated incrementally with ``partial_fit`` on every ``train()`` call;
    - offline learning (``partial_fit_flag=False``): SVR, fitted once
      (SVR cannot learn incrementally, so later ``train()`` calls are no-ops).

    The model can be persisted/restored with pickle via save_model/load_model.
    """

    def __init__(self, partial_fit_flag=True):
        # The regressor is created lazily on the first train() call.
        self.partial_fit_flag = partial_fit_flag
        self.regressor = None

    def fit(self, X_train, y_train):
        """Fit the underlying regressor, incrementally or in one shot."""
        if self.partial_fit_flag:
            self.regressor.partial_fit(X_train, y_train)
        else:
            self.regressor.fit(X_train, y_train)

    def train(self, X_train, y_train):
        """Create the regressor on first use, then fit it on (X_train, y_train)."""
        if self.regressor is None:
            if self.partial_fit_flag:
                self.regressor = PassiveAggressiveRegressor(random_state=42)
                # self.regressor = SGDRegressor(random_state=42)  # alternative online learner
            else:
                self.regressor = SVR(kernel='rbf', C=1000, gamma=0.1)
            self.fit(X_train, y_train)
        else:
            # Only online models can keep learning after the initial fit.
            if self.partial_fit_flag:
                self.fit(X_train, y_train)

    def predict(self, X_test):
        """Return predictions for X_test; train() or load_model() must run first."""
        return self.regressor.predict(X_test)

    def metrics_errror(self, y_predict, y_test):
        # Misspelled name kept for backward compatibility with existing callers;
        # new code should use metrics_error().
        return self.metrics_error(y_predict, y_test)

    def metrics_error(self, y_predict, y_test):
        """Mean squared error between predictions and ground truth."""
        return metrics.mean_squared_error(y_predict, y_test)

    def save_model(self, name):
        """Pickle the trained regressor to file `name` (no-op if untrained)."""
        if self.regressor is not None:
            with open(name, 'wb') as m:
                pickle.dump(self.regressor, m)

    def load_model(self, name):
        """Restore a previously pickled regressor from file `name`."""
        with open(name, 'rb') as m:
            self.regressor = pickle.load(m)
###############################################################################
class ProcessData:
    """Data loading, scaling and timestamp bookkeeping for the power model.

    Holds a lazily-created MinMax scaler which is partial-fitted so it can
    keep adapting across successive data batches (online-learning use case).
    The scaler can be persisted/restored with pickle.
    """

    def __init__(self, scaler_partialfit_flag=True):
        self.scaler_partialfit_flag = scaler_partialfit_flag
        self.scaler = None

    def fit(self, X_train):
        # Always partial_fit so repeated batches refine the scaler's range.
        # (A full fit() variant was considered but intentionally disabled.)
        self.scaler.partial_fit(X_train)

    def train_test_split_scale(self, data, target, testSize=0):
        """Split (data, target), fit/refit the scaler on the train part, scale.

        Returns (X_train, y_train) when testSize == 0, otherwise
        (X_train, y_train, X_test, y_test).
        """
        X_train, X_test, y_train, y_test = cross_validation.train_test_split(
            data, target, test_size=testSize, random_state=42)
        if self.scaler is None:
            self.scaler = preprocessing.MinMaxScaler(feature_range=(-1, 1))
            # alternative: preprocessing.StandardScaler()
        self.fit(X_train)
        X_train = self.scaler.transform(X_train)
        if len(X_test) == 0:
            return X_train, y_train
        X_test = self.scaler.transform(X_test)
        return X_train, y_train, X_test, y_test

    def validate_date(self, datetime_string):
        """Parse "%Y-%m-%d %H:%M:%S"; return a datetime, or False if invalid."""
        try:
            return datetime.strptime(datetime_string, "%Y-%m-%d %H:%M:%S")
        except ValueError:
            return False

    def set_dateTime(self, datetime_string):
        """Persist the latest-processed timestamp to DB.ini (if valid)."""
        if self.validate_date(datetime_string) is not False:
            configW = configparser.RawConfigParser()
            configW.optionxform = str  # keep option-name case as written
            configW.add_section('DB')
            configW.set('DB', 'LatestTime', datetime_string)
            with open('DB.ini', 'w') as configfile:
                configW.write(configfile)

    def get_dateTime(self):
        """Read the persisted timestamp from DB.ini; 0 if missing/invalid."""
        configR = configparser.RawConfigParser()
        configR.read("DB.ini")
        db_time = configR.get("DB", "LatestTime")
        if self.validate_date(db_time) is False:
            return 0
        return db_time

    def load_data_from_db(self, alwaysLatestFlag=False):
        # Placeholder: DB-backed loading not implemented in this version.
        pass

    def load_data_from_file(self, path):
        """Load the household-power CSV; derive Month/Hour features.

        Returns (X, y); on any failure returns ([], []) after logging.
        """
        try:
            data = pandas.read_csv(path)
            # .copy() avoids pandas SettingWithCopy issues when adding columns below.
            X = data[['Date', 'Time', 'Global_reactive_power', 'Voltage',
                      'Global_intensity', 'Sub_metering_1', 'Sub_metering_2',
                      'Sub_metering_3']].copy()
            y = data['Global_active_power']

            # Replace raw Date/Time strings with coarse numeric features.
            D = pandas.to_datetime(X["Date"], format="%d/%m/%Y")
            X["Month"] = D.apply(lambda x: x.month)
            X = X.drop(["Date"], axis=1)

            T = pandas.to_datetime(X["Time"], format="%H:%M:%S")
            X["Hour"] = T.apply(lambda x: x.hour)
            X = X.drop(["Time"], axis=1)
            return X, y
        except Exception:
            # Narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt.
            print('ERROR load_data_from_file!!!')
            return [], []

    def save_scaler(self, scalerName):
        """Pickle the fitted scaler (no-op if the scaler was never created)."""
        if self.scaler is not None:
            with open(scalerName, 'wb') as s:
                pickle.dump(self.scaler, s)

    def load_scaler(self, scalerName):
        """Restore a previously pickled scaler."""
        with open(scalerName, 'rb') as s:
            self.scaler = pickle.load(s)
###############################################################################
def Online_Learning_Train():
    # Build scaler + online model from scratch on pw3.csv, then persist both
    # and report the training-set MSE.
    processor = ProcessData()
    features, target = processor.load_data_from_file('pw3.csv')
    X_scaled, y_train = processor.train_test_split_scale(features, target)
    model = PredictData()
    model.train(X_scaled, y_train)
    predictions = model.predict(X_scaled)
    mse = model.metrics_errror(predictions, y_train)
    processor.save_scaler('scaler.pkl')
    model.save_model('model.pkl')
    print('MSE: {0:f}'.format(mse))

def Online_Learning_Test():
    # Resume from the persisted scaler/model, continue learning on pw5.csv,
    # and report the resulting MSE.
    processor = ProcessData()
    processor.load_scaler('scaler.pkl')
    features, target = processor.load_data_from_file('pw5.csv')
    X_scaled, y_true = processor.train_test_split_scale(features, target)
    model = PredictData()
    model.load_model('model.pkl')
    model.train(X_scaled, y_true)
    predictions = model.predict(X_scaled)
    mse = model.metrics_errror(predictions, y_true)
    print('MSE: {0:f}'.format(mse))

def Offline_Learning_Train():
    # One-shot (batch) SVR training on pw.csv; persist scaler and model,
    # report training-set MSE.
    processor = ProcessData()
    features, target = processor.load_data_from_file('pw.csv')
    X_scaled, y_train = processor.train_test_split_scale(features, target)
    model = PredictData(False)
    model.train(X_scaled, y_train)
    mse = model.metrics_errror(model.predict(X_scaled), y_train)
    processor.save_scaler('scaler.pkl')
    model.save_model('model.pkl')
    print('MSE: {0:f}'.format(mse))

def Offline_Learning_Test():
    # Evaluate the persisted offline model on pw5.csv (no further training).
    processor = ProcessData()
    processor.load_scaler('scaler.pkl')
    features, target = processor.load_data_from_file('pw5.csv')
    X_scaled, y_true = processor.train_test_split_scale(features, target)
    model = PredictData(False)
    model.load_model('model.pkl')
    mse = model.metrics_errror(model.predict(X_scaled), y_true)
    print('MSE: {0:f}'.format(mse))

def Main():
    """Entry point: run the online-learning train/test demo."""
    Online_Learning_Train()
    Online_Learning_Test()
    # Offline variant (disabled):
    # Offline_Learning_Train()
    # Offline_Learning_Test()


if __name__ == '__main__':
    Main()

Data preprocessing
pw.csv is derived from the UCI "Individual household electric power consumption" Data Set

Date,Time,Global_active_power,Global_reactive_power,Voltage,Global_intensity,Sub_metering_1,
Sub_metering_2,Sub_metering_3
16/12/2006,17:24:00,4.216,0.418,234.840,18.400,0.000,1.000,17.000