forked from warmspringwinds/pytorch-cpp
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathnumpy.cpp
More file actions
55 lines (47 loc) · 1.15 KB
/
numpy.cpp
File metadata and controls
55 lines (47 loc) · 1.15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
/*
Example shows how to repeat numpy operations using pytorch Tensor
*/
#include "ATen/ATen.h"
#include "ATen/Type.h"

#include <math.h>

#include <iostream>
using namespace at;
using namespace std;
/*
 Scalar sigmoid demo: prints 1 / (1 + exp(-x)) to stdout.

 Generalized to take a double (was int) so the function is meaningful at
 non-integer points; existing integer callers still compile via implicit
 conversion, so the change is backward-compatible.
*/
void basic_sigmoid(double x) {
    const double s = 1.0 / (1.0 + exp(-x));
    std::cout << s << std::endl;
}
/*
 Elementwise sigmoid on a tensor, computed manually as
 1 / (1 + exp(-x)), then printed to stdout.
*/
void tensor_sigmoid(Tensor x) {
    const Tensor neg_x = x * -1;          // -x, kept in a named temporary
    const Tensor result = 1 / (1 + neg_x.exp());
    std::cout << result << std::endl;
}
/*
 Elementwise derivative of the sigmoid: prints s * (1 - s),
 where s = sigmoid(x).
*/
void tensor_sigmoid_derv(Tensor x) {
    const Tensor s = x.sigmoid();
    const Tensor grad = s * (1 - s);
    std::cout << grad << std::endl;
}
void reshape() {
Tensor a = CPU(kFloat).rand({3, 3, 2});
cout << a.sizes() << endl;
a.resize_({a.sizes()[0]*a.sizes()[1]*a.sizes()[2], 1});
cout << a.sizes() << endl;
cout << a << endl;
}
void norm() {
float float_buffer[] = {0, 3, 4,
1, 6, 4};
Tensor a = CPU(kFloat).tensorFromBlob(float_buffer, {2,3});
cout << a << endl;
a = a.norm(0, 1, true);
cout << a << endl;
}
/*
 Entry point: exercises the numpy-style tensor helpers defined above.
 Only tensor_sigmoid is currently enabled; the other examples are kept
 commented out while under development.
*/
int main() {
    // TODO: start developing numpy alike methods
    // basic_sigmoid(3);
    float input[] = {1, 2, 3};
    tensor_sigmoid(CPU(kFloat).tensorFromBlob(input, {1, 3}));
    // tensor_sigmoid_derv(CPU(kFloat).tensorFromBlob(input, {1, 3}));
    // reshape();
    // norm();
    return 0;
}