Changes from all commits (67 commits)
820e426  'Declare' & skeleton for 'function_transformation' (DelphianCalamity, Jun 11, 2018)
ff23c61  Added the inlining feature (DelphianCalamity, Jun 13, 2018)
3d60a2f  Transformation_v.1 (DelphianCalamity, Jun 16, 2018)
26d33f9  Transformation_v.2 (DelphianCalamity, Jun 17, 2018)
843c3cb  Preparing framework/core for new ops Call/Return/NextCall (DelphianCalamity, Jun 17, 2018)
cf1b2e8  Transformation v.3 (DelphianCalamity, Jun 19, 2018)
5b8715e  Transformation_v.4 (DelphianCalamity, Jun 20, 2018)
7113382  Call/Return kernels (DelphianCalamity, Jun 25, 2018)
fc1114a  Common Runtime -ongoing (DelphianCalamity, Jun 30, 2018)
2658ed6  Fixed Executor for mutually recursive functions (DelphianCalamity, Jul 2, 2018)
87738e1  Fixed bug in transformation (DelphianCalamity, Jul 2, 2018)
37ffe26  Remove unnecessary changes to files (acharal, Jul 3, 2018)
d113ee5  Remove (more) unnecessary edits (acharal, Jul 3, 2018)
5cdeeaf  Reformat and reident (acharal, Jul 3, 2018)
921a3b5  Remove newline (acharal, Jul 3, 2018)
605033a  Reformat executor code (acharal, Jul 3, 2018)
3830c4b  Reformat executor code (acharal, Jul 3, 2018)
1b721a0  Minor reformat in executor (acharal, Jul 3, 2018)
8193c33  Minor in executor (acharal, Jul 3, 2018)
c1a22d8  Set constant folding off instead of commenting out the code (acharal, Jul 3, 2018)
024eab1  Missing indentation (acharal, Jul 3, 2018)
1678395  Missing indentation (acharal, Jul 3, 2018)
bfd1183  Missing indentation (acharal, Jul 3, 2018)
8011181  Reindent function transformation; hopefully it compiles (acharal, Jul 3, 2018)
4a9907c  Fixes (possibly) the conflict with r1.4 (acharal, Jul 3, 2018)
b12cc77  Fixes (possibly) the conflict with r1.4 (acharal, Jul 3, 2018)
e7a9899  Split GetNextIterationCallNode to two separate functions (acharal, Jul 3, 2018)
5ac76f6  Update functions.{cc,h} with the original files. (acharal, Jul 3, 2018)
9847852  Move Call/Return kernels and ops to separate file (acharal, Jul 3, 2018)
ab40d1e  Fix compilation hiccups (acharal, Jul 3, 2018)
a067cbc  Include function_control_ops in test deps (acharal, Jul 4, 2018)
b32196a  Fixes in Build for unit testing (DelphianCalamity, Jul 4, 2018)
9d2564a  Merge remote-tracking branch 'origin/r1.4_recursion' into r1.4_recursion (DelphianCalamity, Jul 4, 2018)
a923f21  Common Runtime (DelphianCalamity, Jul 4, 2018)
5f463c6  restoring utils/functions.cc (DelphianCalamity, Jul 5, 2018)
b59658a  Possibly fixed Constant Folding (DelphianCalamity, Jul 5, 2018)
7475963  Possibly fixed Constant Folding (DelphianCalamity, Jul 5, 2018)
2aeca94  Deleting copyrights (DelphianCalamity, Jul 5, 2018)
99d3cc2  Enabled Topological Sort (DelphianCalamity, Jul 5, 2018)
7e51628  Change a condition in executor to the equivalent IsCall (acharal, Jul 5, 2018)
896846f  Add license preamble to new files (acharal, Jul 5, 2018)
55d080e  Merge remote-tracking branch 'recursion/r1.4' into r1.4_recursion_merge (acharal, Jul 5, 2018)
55603da  Use functions.cc from commit 7013a5 (acharal, Jul 5, 2018)
6d0f462  Some Test Cases (DelphianCalamity, Jul 6, 2018)
c514d2a  Merge remote-tracking branch 'origin/r1.4_recursion' into r1.4_recursion (DelphianCalamity, Jul 6, 2018)
92bcceb  Fixed Topological Sort (DelphianCalamity, Jul 6, 2018)
b95938d  Mini optimization in Topological Sort (DelphianCalamity, Jul 6, 2018)
0d193af  Renamed CallOp class (DelphianCalamity, Jul 10, 2018)
6529bbc  Add Ackermann function as test (acharal, Jul 11, 2018)
c953c6a  Add some more classic benchmarks (acharal, Jul 11, 2018)
8cf77f7  Change indentation in function trans (acharal, Jul 13, 2018)
0afc0e5  Change some more indentation (acharal, Jul 13, 2018)
e01e329  Move typedef to cc (acharal, Jul 13, 2018)
3d3cfcc  Optimized Tags (DelphianCalamity, Jul 13, 2018)
bb403ff  "Change MakeFrameFunctions to MakeFrameName;rely on overloading" (DelphianCalamity, Jul 13, 2018)
599c274  More Opts (DelphianCalamity, Jul 14, 2018)
6ce4f1a  Change frame lock mutex and typos (acharal, Jul 16, 2018)
7223ca7  Minor change the creation of a frame (acharal, Jul 16, 2018)
0bf53cd  Refactor synonym_frames (acharal, Jul 16, 2018)
d53d6d9  Remove var ref in synonym_frame_names (acharal, Jul 16, 2018)
3ca7c5b  Untangle child_name-ing and parallel iters (acharal, Jul 16, 2018)
d034dc2  Guard GetNodeAttr to access it when it's necessary (acharal, Jul 16, 2018)
3e3eac0  Use parent_frame not null check (acharal, Jul 17, 2018)
8fb3e55  Local executor optimizations (#8) (acharal, Jul 20, 2018)
4c192b4  bugs fixed while working on distr (DelphianCalamity, Sep 5, 2018)
f47355a  ackermann (DelphianCalamity, Sep 15, 2018)
0497209  Distributed Runtime (#11) (DelphianCalamity, Sep 17, 2018)
29 changes: 29 additions & 0 deletions TESTS/2DimensionOutput.py
@@ -0,0 +1,29 @@
import tensorflow as tf
from tensorflow.python.framework import function

fac = function.Declare("Fac", [("n", tf.int32)], [("ret", tf.int32)])

@function.Defun(tf.int32, func_name="Fac", out_names=["ret"])
def FacImpl(n):
    return tf.cond(tf.less_equal(n, 1),
                   lambda: tf.constant([1, 1]),
                   lambda: [n, n] * fac(n - 1))


FacImpl.add_to_graph(tf.get_default_graph())

n = tf.placeholder(tf.int32, shape=[])
x = tf.add(n, 1)
result = fac(x)
y = tf.add(result, [1,1])

#print(tf.get_default_graph().as_graph_def())

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

sess = tf.Session()
print(sess.run(y, feed_dict={n: 5}))

writer.close()

sess.close()
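Note: since [n, n] * fac(n - 1) multiplies elementwise, feeding n = 5 computes fac(6) = [720, 720], so the script should print [721, 721].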
44 changes: 44 additions & 0 deletions TESTS/ackermann.py
@@ -0,0 +1,44 @@
import tensorflow as tf
from tensorflow.python.framework import function

ack = function.Declare("Ack", [("m", tf.int32), ("n", tf.int32)], [("ret", tf.int32)])

@function.Defun(tf.int32, tf.int32, func_name="Ack", out_names=["ret"])
def AckImpl(m,n):

    def f1():
        ret = n + 1
        return ret

    def f2():
        def ff1():
            r = ack(m - 1, 1)
            return r

        def ff2():
            r = ack(m - 1, ack(m, n - 1))
            return r

        ret = tf.cond(tf.equal(n, 0), ff1, ff2)
        return ret

    return tf.cond(tf.equal(m, 0), f1, f2)


AckImpl.add_to_graph(tf.get_default_graph())

n = tf.placeholder(tf.int32, shape=[])
m = tf.placeholder(tf.int32, shape=[])
res = ack(m,n)

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

sess = tf.Session()

#print(tf.get_default_graph().as_graph_def())

print(sess.run(res, feed_dict={m:2, n:3}))

sess.close()

writer.close()
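Note: with the feed m = 2, n = 3 the expected output is Ackermann(2, 3) = 9.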
13 changes: 13 additions & 0 deletions TESTS/create_worker.py
@@ -0,0 +1,13 @@
# Get task number from command line
import sys
task_number = int(sys.argv[1])

import tensorflow as tf

cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})
server = tf.train.Server(cluster, job_name="local", task_index=task_number)

print("Starting server #{}".format(task_number))

server.start()
server.join()
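Note: the scripts under TESTS/distributed/ expect two of these workers to be running already, presumably started in separate terminals as python create_worker.py 0 and python create_worker.py 1, so that tasks 0 and 1 of the "local" job listen on localhost:2222 and localhost:2223.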
40 changes: 40 additions & 0 deletions TESTS/distributed/distr_factorial.py
@@ -0,0 +1,40 @@
import tensorflow as tf
from tensorflow.python.framework import function

cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})

fac = function.Declare("Fac", [("n", tf.int32)], [("ret", tf.int32)])

@function.Defun(tf.int32, func_name="Fac", out_names=["ret"])
def FacImpl(n):

    def f1():
        with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
            ret = tf.constant(1)
        return ret

    def f2():
        with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
            ret = n * fac(n - 1)
        return ret

    with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
        pred = tf.less_equal(n, 1)

    return tf.cond(pred, f1, f2)

FacImpl.add_to_graph(tf.get_default_graph())

n = tf.placeholder(tf.int32, shape=[])
x = tf.add(n, 1)
result = fac(x)
y = tf.add(result, 1)

#print(tf.get_default_graph().as_graph_def())

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

with tf.Session("grpc://localhost:2222") as sess:
    print(sess.run(y, feed_dict={n: 5}))

writer.close()

44 changes: 44 additions & 0 deletions TESTS/distributed/distr_fcallsg.py
@@ -0,0 +1,44 @@
import tensorflow as tf
from tensorflow.python.framework import function

cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})

@function.Defun(tf.float32)
def G(x):

    with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
        ret = x + x

    return ret


@function.Defun(tf.float32, tf.float32)
def MyFunc(x, y):

    with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
        g1 = G(x)
        g2 = G(y)

    ret = g1 + g2

    return ret


# Building the graph.

a = tf.constant([4.0], name="a")
b = tf.placeholder(tf.float32, name="MyPlaceHolder")

add = tf.add(a, b, name="add")
sub = tf.subtract(a, b, name="sub")

ret = MyFunc(add, sub, name='mycall')

#x = tf.add(c, d)

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

with tf.Session("grpc://localhost:2222") as sess:
    print(sess.run([ret], feed_dict={b: 1}))

writer.close()
39 changes: 39 additions & 0 deletions TESTS/distributed/distr_fibonacci.py
@@ -0,0 +1,39 @@
import tensorflow as tf
from tensorflow.python.framework import function

cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})

fib = function.Declare("Fib", [("n", tf.int32)], [("ret", tf.int32)])

@function.Defun(tf.int32, func_name="Fib", out_names=["ret"])
def FibImpl(n):

    def f1():
        with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
            ret = tf.constant(1)
        return ret

    def f2():
        with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
            fib1 = fib(n - 1)
        with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
            fib2 = fib(n - 2)

        return fib1 + fib2

    return tf.cond(tf.less_equal(n, 1), f1, f2)

FibImpl.add_to_graph(tf.get_default_graph())

n = tf.placeholder(tf.int32, shape=[])
x = fib(n)

res = tf.add(x, 1)

#print(tf.get_default_graph().as_graph_def())

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

with tf.Session("grpc://localhost:2222") as sess:
    print(sess.run(res, feed_dict={n: 20}))

writer.close()
36 changes: 36 additions & 0 deletions TESTS/distributed/distr_fog.py
@@ -0,0 +1,36 @@
import tensorflow as tf
from tensorflow.python.framework import function

cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})

@function.Defun(tf.float32)
def G(x):
    with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
        add = x + 1
    with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
        ret = x * add
    return ret

@function.Defun(tf.float32)
def F(x):
    with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
        add = x + 1
    with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
        ret = x * add
    return ret


a = tf.constant([4.0], name="a")
b = tf.placeholder(tf.float32, name="MyPlaceHolder")

add = tf.add(a, b, name="add")

ret = F(G(add), name='mycall')

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

with tf.Session("grpc://localhost:2222") as sess:
    print(sess.run([ret], feed_dict={b: 1}))

writer.close()

36 changes: 36 additions & 0 deletions TESTS/distributed/distr_funcSimple.py
@@ -0,0 +1,36 @@
import tensorflow as tf
from tensorflow.python.framework import function

cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})

@function.Defun(tf.int32, tf.int32)
def MyFunc(x, y):

    with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
        add1 = x + y

    return [add1, x - y]


# Building the graph.

a = tf.constant([4], name="x")
b = tf.placeholder(tf.int32, name="MyPlaceHolder")

with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
    add = tf.add(a, b, name="add")

with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
    sub = tf.subtract(a, b, name="sub")

[c,d] = MyFunc(add, sub, name='mycall')

x = tf.add(c, d)

#print(tf.get_default_graph().as_graph_def())

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

with tf.Session("grpc://localhost:2222") as sess:
    print(sess.run([x], feed_dict={b: 1}))

writer.close()
49 changes: 49 additions & 0 deletions TESTS/distributed/distr_mutrec.py
@@ -0,0 +1,49 @@
import tensorflow as tf
from tensorflow.python.framework import function

cluster = tf.train.ClusterSpec({"local": ["localhost:2222", "localhost:2223"]})

f = function.Declare("F", [("n", tf.int32)], [("ret", tf.int32)])
g = function.Declare("G", [("n", tf.int32)], [("ret", tf.int32)])

@function.Defun(tf.int32, func_name="F", out_names=["ret"])
def FImpl(n):

    def f1():
        with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
            ret = tf.constant(1)
        return ret

    def f2():
        with tf.device("/job:local/replica:0/task:0/device:CPU:0"):
            x = n - 1
            ret = g(x)
        return ret

    # with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
    pred = tf.less_equal(n, 1)

    return tf.cond(pred, f1, f2)


@function.Defun(tf.int32, func_name="G", out_names=["ret"])
def GImpl(n):

    with tf.device("/job:local/replica:0/task:1/device:CPU:0"):
        x = n - 1
        ret = f(x)
    return ret


FImpl.add_to_graph(tf.get_default_graph())
GImpl.add_to_graph(tf.get_default_graph())


n = tf.placeholder(tf.int32, name="MyPlaceHolder")
x = f(n)

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

with tf.Session("grpc://localhost:2222") as sess:
    print(sess.run([x], feed_dict={n: 4}))

writer.close()
29 changes: 29 additions & 0 deletions TESTS/factorial.py
@@ -0,0 +1,29 @@
import tensorflow as tf
from tensorflow.python.framework import function

fac = function.Declare("Fac", [("n", tf.int32)], [("ret", tf.int32)])

@function.Defun(tf.int32, func_name="Fac", out_names=["ret"])
def FacImpl(n):
    return tf.cond(tf.less_equal(n, 1),
                   lambda: tf.constant(1),
                   lambda: n * fac(n - 1))


FacImpl.add_to_graph(tf.get_default_graph())

n = tf.placeholder(tf.int32, shape=[])
x = tf.add(n, 1)
result = fac(x)
y = tf.add(result, 1)

#print(tf.get_default_graph().as_graph_def())

writer = tf.summary.FileWriter('./graphs', tf.get_default_graph())

sess = tf.Session()
print(sess.run(y, feed_dict={n: 5}))

writer.close()

sess.close()
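Note: with n fed as 5 the graph evaluates fac(6) = 720, so the script should print 721.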